IGNITE-14714 LineLength checkstyle rule added #9106

Merged
11 commits merged on May 19, 2021
2 changes: 2 additions & 0 deletions checkstyle/checkstyle-suppressions.xml
@@ -24,4 +24,6 @@
<suppress checks="EmptyLineSeparator|MethodParamPad|SingleSpaceSeparator"
files="BCrypt\.java|ConcurrentLinkedDeque8\.java"/>
<suppress checks="NoWhitespaceBefore" files="ConcurrentLinkedHashMap\.java"/>
<suppress checks="LineLength"
files="ClusterTagGenerator\.java|PagesWriteSpeedBasedThrottle\.java|GridExecutorService\.java|CatboostClassificationModel\.java"/>
</suppressions>
8 changes: 8 additions & 0 deletions checkstyle/checkstyle.xml
@@ -135,4 +135,12 @@
-->
<module name="GenericWhitespace"/>
</module>

<!--
Checks that the line length does not exceed 140 characters.
See: https://checkstyle.org/config_sizes.html#LineLength
-->
<module name="LineLength">
<property name="max" value="140"/>
</module>
</module>
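
The remaining changes in this PR wrap lines that exceed the new 140-character limit. As a minimal sketch of the pattern applied in most of the files below (the example reuses one of the declarations changed later in the diff, shown here purely for illustration), an over-long assignment is broken after the "=" and continued on an indented line:

// Before: a single declaration longer than 140 characters.
Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

// After: wrapped so that each line stays within the limit.
Vectorizer<Integer, Vector, Integer, Double> vectorizer =
    new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);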
@@ -62,7 +62,8 @@ public static void main(String[] args) throws IOException {
try {
dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);

Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
Vectorizer<Integer, Vector, Integer, Double> vectorizer =
new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

KMeansTrainer trainer = new KMeansTrainer();

@@ -80,7 +80,8 @@ public static void main(String[] args) throws Exception {

Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<>(1);

IgniteFunction<LabeledVector<Double>, LabeledVector<double[]>> func = lv -> new LabeledVector<>(lv.features(), new double[] {lv.label()});
IgniteFunction<LabeledVector<Double>, LabeledVector<double[]>> func =
lv -> new LabeledVector<>(lv.features(), new double[] {lv.label()});

//NOTE: This class is part of Developer API and all lambdas should be loaded on server manually.
Preprocessor<Integer, Vector> preprocessor = new PatchedPreprocessor<>(func, vectorizer);
@@ -89,18 +90,19 @@ public static void main(String[] args) throws Exception {
SimpleLabeledDatasetDataBuilder<Integer, Vector, AlgorithmSpecificPartitionContext> builder =
new SimpleLabeledDatasetDataBuilder<>(preprocessor);

IgniteBiFunction<SimpleLabeledDatasetData, AlgorithmSpecificPartitionContext, SimpleLabeledDatasetData> builderFun = (data, ctx) -> {
double[] features = data.getFeatures();
int rows = data.getRows();
IgniteBiFunction<SimpleLabeledDatasetData, AlgorithmSpecificPartitionContext, SimpleLabeledDatasetData> builderFun =
(data, ctx) -> {
double[] features = data.getFeatures();
int rows = data.getRows();

// Makes a copy of features to supplement it by columns with values equal to 1.0.
double[] a = new double[features.length + rows];
Arrays.fill(a, 1.0);
// Makes a copy of features to supplement it by columns with values equal to 1.0.
double[] a = new double[features.length + rows];
Arrays.fill(a, 1.0);

System.arraycopy(features, 0, a, rows, features.length);
System.arraycopy(features, 0, a, rows, features.length);

return new SimpleLabeledDatasetData(a, data.getLabels(), rows);
};
return new SimpleLabeledDatasetData(a, data.getLabels(), rows);
};

try (AlgorithmSpecificDataset dataset = DatasetFactory.create(
ignite,
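As an aside (not part of the PR), the builder function above prepends a column of ones to the feature matrix so that an intercept term can be fitted. Assuming the features array is laid out column-major (an assumption made only for this illustration), a tiny self-contained sketch with made-up values shows the transformation:

// Two rows and two feature columns stored column-major: [col1 | col2].
double[] features = {1.0, 2.0, 3.0, 4.0};
int rows = 2;

// Same steps as in builderFun: fill with 1.0, then copy the features after the first `rows` cells.
double[] a = new double[features.length + rows];
java.util.Arrays.fill(a, 1.0);
System.arraycopy(features, 0, a, rows, features.length);

// a == {1.0, 1.0, 1.0, 2.0, 3.0, 4.0}, i.e. columns [1, 1], [1, 2] and [3, 4].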
@@ -53,7 +53,8 @@ public class CatboostClassificationModelParserExample {
/**
* Test expected results.
*/
private static final String TEST_ER_RES = "examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv";
private static final String TEST_ER_RES =
"examples/src/main/resources/datasets/amazon-employee-access-challenge-sample-catboost-expected-results.csv";

/**
* Parser.
@@ -62,7 +62,8 @@ public static void main(String[] args) throws IOException {
try {
dataCache = new SandboxMLCache(ignite).fillCacheWith(MLSandboxDatasets.TWO_CLASSED_IRIS);

Vectorizer<Integer, Vector, Integer, Double> vectorizer = new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);
Vectorizer<Integer, Vector, Integer, Double> vectorizer =
new DummyVectorizer<Integer>().labeled(Vectorizer.LabelCoordinate.FIRST);

KMeansTrainer trainer = new KMeansTrainer()
.withDistance(new WeightedMinkowskiDistance(2, new double[] {5.9360, 2.7700, 4.2600, 1.3260}));
@@ -50,15 +50,18 @@ public class DecisionTreeFromSparkExample {
.toPath().toAbsolutePath().toString();

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Decision Tree model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Decision Tree model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -50,15 +50,18 @@ public class DecisionTreeRegressionFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/dtreg";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Decision tree regression model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Decision tree regression model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -48,15 +48,18 @@ public class GBTFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/gbt";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Gradient Boosted trees model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Gradient Boosted trees model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -50,15 +50,18 @@ public class GBTRegressionFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/gbtreg";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> GBT Regression model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> GBT Regression model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -50,8 +50,9 @@ public class KMeansFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/kmeans";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
@@ -50,15 +50,17 @@ public class LinearRegressionFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/linreg";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Linear regression model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Linear regression model loaded from Spark through serialization over partitioned dataset usage example started.");
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -48,15 +48,18 @@ public class LogRegFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/logreg";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Logistic regression model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Logistic regression model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -48,15 +48,18 @@ public class RandomForestFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/rf";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Random Forest model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Random Forest model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -50,15 +50,18 @@ public class RandomForestRegressionFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/rfreg";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
*/
public static void main(String[] args) throws FileNotFoundException {
System.out.println();
System.out.println(">>> Random Forest regression model loaded from Spark through serialization over partitioned dataset usage example started.");
System.out.println(
">>> Random Forest regression model loaded from Spark through serialization over partitioned dataset usage example started."
);
// Start ignite grid.
try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
System.out.println(">>> Ignite grid started.");
@@ -48,8 +48,9 @@ public class SVMFromSparkExample {
public static final String SPARK_MDL_PATH = "examples/src/main/resources/models/spark/serialized/svm";

/** Learning environment. */
public static final LearningEnvironment env = LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();
public static final LearningEnvironment env =
LearningEnvironmentBuilder.defaultBuilder().withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
.withLoggingFactoryDependency(ConsoleLogger.Factory.HIGH).buildForTrainer();

/**
* Run example.
@@ -145,7 +145,12 @@ public static void main(String[] args) throws IOException {

confusionMtxWithMinMaxScaling[idx1][idx2]++;

System.out.printf(">>> | %.4f\t\t| %.4f\t\t\t\t\t\t| %.4f\t\t|\n", prediction, predictionWithMinMaxScaling, groundTruth);
System.out.printf(
">>> | %.4f\t\t| %.4f\t\t\t\t\t\t| %.4f\t\t|\n",
prediction,
predictionWithMinMaxScaling,
groundTruth
);
}
System.out.println(">>> ----------------------------------------------------------------");
System.out.println("\n>>> -----------------One-vs-Rest SVM model-------------");
@@ -71,8 +71,8 @@ public static void main(String[] args) {
Set<Integer> targetEncodedfeaturesIndexies = new HashSet<>(Arrays.asList(1, 5, 6));
Integer targetIndex = 0;

final Vectorizer<Integer, Object[], Integer, Object> vectorizer = new ObjectArrayVectorizer<Integer>(featuresIndexies.toArray(new Integer[0]))
.labeled(targetIndex);
final Vectorizer<Integer, Object[], Integer, Object> vectorizer =
new ObjectArrayVectorizer<Integer>(featuresIndexies.toArray(new Integer[0])).labeled(targetIndex);

Preprocessor<Integer, Object[]> strEncoderPreprocessor = new EncoderTrainer<Integer, Object[]>()
.withEncoderType(EncoderType.STRING_ENCODER)
@@ -42,7 +42,8 @@
* Description of model can be found in: https://en.wikipedia.org/wiki/Linear_regression . Original dataset can be
* downloaded from: https://archive.ics.uci.edu/ml/machine-learning-databases/housing/ . Copy of dataset are stored in:
* modules/ml/src/main/resources/datasets/boston_housing_dataset.txt . Score for regression estimation: R^2 (coefficient
* of determination). Description of score evaluation can be found in: https://stattrek.com/statistics/dictionary.aspx?definition=coefficient_of_determination
* of determination). Description of score evaluation can be found in:
* https://stattrek.com/statistics/dictionary.aspx?definition=coefficient_of_determination
* .
*/
public class BostonHousePricesPredictionExample {
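The Javadoc above mentions R^2 (the coefficient of determination) as the score used for regression estimation. For reference, a minimal sketch of how R^2 can be computed from predictions and ground-truth labels; the helper below is hypothetical and not part of the example:

/** Hypothetical helper, for illustration only: R^2 = 1 - SS_res / SS_tot. */
static double rSquared(double[] predictions, double[] labels) {
    double mean = 0.0;

    for (double lbl : labels)
        mean += lbl / labels.length;

    double ssRes = 0.0; // Residual sum of squares.
    double ssTot = 0.0; // Total sum of squares.

    for (int i = 0; i < labels.length; i++) {
        ssRes += (labels[i] - predictions[i]) * (labels[i] - predictions[i]);
        ssTot += (labels[i] - mean) * (labels[i] - mean);
    }

    return 1.0 - ssRes / ssTot;
}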