Skip to content

Commit

Permalink
Merge develop
Browse files Browse the repository at this point in the history
  • Loading branch information
ebuildy authored Feb 22, 2022
2 parents f23ce0f + 255b593 commit 9f2a611
Show file tree
Hide file tree
Showing 69 changed files with 1,128 additions and 733 deletions.
1 change: 1 addition & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
*
!dev
!build
!packages
!VERSION
!README.md
7 changes: 6 additions & 1 deletion .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,4 +26,9 @@ jobs:
uses: gradle/gradle-build-action@4137be6a8bf7d7133955359dbd952c0ca73b1021
with:
arguments: |
-Dgpg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} -x test djobiAssemble
-Dgpg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} test djobiAssemble
- name: Publish Test Report
uses: mikepenz/action-junit-report@v2
if: always() # always run even if the previous step fails
with:
report_paths: '**/build/test-results/test/TEST-*.xml'
2 changes: 1 addition & 1 deletion .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ jobs:
uses: gradle/gradle-build-action@4137be6a8bf7d7133955359dbd952c0ca73b1021
with:
arguments: |
-Prelease.useLastTag=true -Dgpg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} djobiAssemble
-Prelease.useLastTag=true -Dgpg.passphrase=${{ secrets.OSSRH_GPG_SECRET_KEY_PASSWORD }} test djobiAssemble
- name: upload artifacts
uses: actions/upload-artifact@v2
with:
Expand Down
67 changes: 28 additions & 39 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,28 @@ plugins {
id 'nebula.release' version '16.0.0'
}

/* PROJECT DEPENDENCY VERSIONS */
// define all common versioned dependencies here
project.ext.dependencyStrings = [
GOOGLE_GUICE: 'com.google.inject:guice:5.0.1',
TYPESAFE_CONFIG: 'com.typesafe:config:1.4.1',
ORG_YAML: 'org.yaml:snakeyaml:1.29',
OKHTTP: 'com.squareup.okhttp3:okhttp:4.9.3',
ELASTIC_APM: 'co.elastic.apm:apm-agent-api:1.29.0',
JANSI: 'org.fusesource.jansi:jansi:2.4.0',
PICOCLI: 'info.picocli:picocli:4.6.2'
]

/**
* Spark provided dependencies
* Can be used with compileOnly
*/
project.ext.sparkDependencyStrings = [
JACKSON_CORE: 'com.fasterxml.jackson.core:jackson-core:2.10.0',
JCRAFT: 'com.jcraft:jsch:0.1.55',
APACHE_COMMONS_TEXT: 'org.apache.commons:commons-text:1.4'
]

allprojects {
afterEvaluate { project ->
println "I'm configuring $project.name with version $project.version"
Expand Down Expand Up @@ -49,38 +71,6 @@ subprojects {
}

ext.djobi = [
/**
* Build a JAR to use if Djobi doesn't run with the Spark assembly (YARN, ...)
*/
sparkAssemblyProvide: {
String taskName = "sparkAssemblyProvide"

shadowJar.enabled = false

task(taskName, type: ShadowJar) {
doFirst {
println "Packaging ${project.name}->sparkAssemblyProvide "
}

from sourceSets.main.output
configurations = [project.configurations.sparkAssemblyProvided]

archiveFileName = "${project.name}-${project.version}-spark-assembly-provided-all.jar"

relocate 'com.typesafe.config', 'my.typesafe.config'
relocate 'com.google.inject', 'my.google.inject'
relocate 'com.google.common', 'my.google.common'
relocate 'okhttp3', 'my.okhttp3'
relocate 'okio', 'my.okio'

/*minimize {
exclude(dependency('org.elasticsearch:.*:.*'))
}*/
}

shadowJar.dependsOn(taskName)
},

/**
* Create a task: "makeRelease"
* - called by djobiAssemble
Expand Down Expand Up @@ -171,8 +161,7 @@ subprojects {
testImplementation(
[group: 'org.junit.jupiter', name: 'junit-jupiter-api', version: '5.7.2'],
[group: 'org.junit.jupiter', name: 'junit-jupiter-params', version: '5.7.2'],
[group: 'com.squareup.okhttp3', name: 'mockwebserver3-junit5', version: '5.0.0-alpha.2'],
[group: 'com.google.inject', name: 'guice', version: '5.0.1']
[group: 'com.squareup.okhttp3', name: 'mockwebserver3-junit5', version: '5.0.0-alpha.2']
)
}

Expand Down Expand Up @@ -297,11 +286,11 @@ build.dependsOn(testReport);
* Relocate some common libs, to prevent conflicts.
*/
shadowJar {
relocate 'com.typesafe.config', 'my.typesafe.config'
relocate 'com.google.inject', 'my.google.inject'
relocate 'com.google.common', 'my.google.common'
relocate 'okhttp3', 'my.okhttp3'
relocate 'okio','my.okio'
relocate 'com.typesafe.config', 'io.datatok.djobi.typesafe.config'
relocate 'com.google.inject', 'io.datatok.djobi.google.inject'
relocate 'com.google.common', 'io.datatok.djobi.google.common'
relocate 'okhttp3', 'io.datatok.djobi.okhttp3'
relocate 'okio','io.datatok.djobi.okio'

minimize()
}
Expand Down
2 changes: 1 addition & 1 deletion dev/default.conf
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ djobi {
utils_country {
type: "table"
format: "parquet"
path: ${projectRoot}"/dev/data/utils_country"
path: ${?projectRoot}"/dev/data/utils_country"
columns: {
test: "toto"
}
Expand Down
24 changes: 17 additions & 7 deletions djobi-cli/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,23 @@ dependencies {
*/
compileOnly project(':djobi-core')

compileOnly project(path: ':djobi-core', configuration: "spark")
compileOnly project(path: ':djobi-core', configuration: "sparkAssemblyProvided")
compileOnly project(path: ':djobi-core', configuration: "sparkAWS")
compileOnly project(path: ':djobi-core', configuration: "djobiCore")
/**
* Provided by Spark - but we don't want a Spark dependency here for the CLI
*/
compileOnly sparkDependencyStrings.JACKSON_CORE
compileOnly sparkDependencyStrings.JCRAFT
compileOnly sparkDependencyStrings.APACHE_COMMONS_TEXT

/**
* Need core tests code
*/
testImplementation(project(":djobi-tests"))

testRuntimeOnly(
project(path: ':djobi-core'),
project(path: ':djobi-core', configuration: 'spark')
)

/**
* To run Djobi via Idea.
*/
Expand All @@ -41,9 +48,12 @@ dependencies {
[group: 'com.squareup.okhttp3', name: 'okhttp', version: '4.3.1'],
)

implementation(
[group: 'info.picocli', name: 'picocli', version: '3.9.5']
)
/**
* Optional dep as it may not work on ARM64
*/
compileOnly dependencyStrings.JANSI

implementation dependencyStrings.PICOCLI
}

run {
Expand Down
18 changes: 15 additions & 3 deletions djobi-cli/src/main/java/io/datatok/djobi/Main.java
Original file line number Diff line number Diff line change
@@ -1,15 +1,19 @@
package io.datatok.djobi;

import com.google.inject.Injector;
import io.datatok.djobi.application.ApplicationBuilder;
import io.datatok.djobi.application.Djobi;
import io.datatok.djobi.application.exceptions.BuildApplicationException;
import io.datatok.djobi.cli.CommandFactory;
import io.datatok.djobi.cli.CommandKernel;
import io.datatok.djobi.cli.StdoutReporter;
import io.datatok.djobi.cli.utils.CLISimpleUtils;
import io.datatok.djobi.cli.utils.CLIUtils;
import io.datatok.djobi.plugins.report.Reporter;
import io.datatok.djobi.plugins.s3.S3Plugin;
import io.datatok.djobi.plugins.stages.DefaultActionsPlugin;
import io.datatok.djobi.spark.SparkPlugin;
import io.datatok.djobi.utils.ClassUtils;
import picocli.CommandLine;

import java.io.Serializable;

Expand Down Expand Up @@ -41,7 +45,10 @@ public static void main(String[] args) {
builder.addPlugin(new S3Plugin());
}

builder.addDependency(Reporter.class, StdoutReporter.class);
builder
.addDependency(Reporter.class, StdoutReporter.class)
.addDependency(CLIUtils.class, CLISimpleUtils.class)
;

application = builder.loadPlugins().build();
} catch(BuildApplicationException e) {
Expand All @@ -52,8 +59,13 @@ public static void main(String[] args) {
return ;
}

Injector injector = application.getInjector();

CommandKernel commandKernel = injector.getInstance(CommandKernel.class);
CommandLine rootCommandImpl = commandKernel.getRootCommand();

try {
application.getInjector().getInstance(CommandFactory.class).run(args);
rootCommandImpl.execute(args);
} catch(RuntimeException e) {
e.printStackTrace();
}
Expand Down
21 changes: 10 additions & 11 deletions djobi-cli/src/main/java/io/datatok/djobi/cli/CommandFactory.java
Original file line number Diff line number Diff line change
@@ -1,29 +1,28 @@
package io.datatok.djobi.cli;

import com.google.inject.Inject;
import com.google.inject.Injector;
import io.datatok.djobi.cli.commands.DjobiCommand;
import org.fusesource.jansi.AnsiConsole;
import com.google.inject.Singleton;
import io.datatok.djobi.cli.utils.CLIUtils;
import picocli.CommandLine;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.List;
import java.util.Map;

@Singleton
public class CommandFactory implements CommandLine.IFactory {

@Inject
private Injector injector;

public CommandFactory() {
AnsiConsole.systemInstall();
}

public void run(final String[] args) {
CommandLine.run(DjobiCommand.class, this, args);
}
@Inject
private CLIUtils cliUtils;

@Override
public <K> K create(Class<K> cls) throws Exception {
if (cls.equals(Map.class) || cls.equals(List.class)) {
return CommandLine.defaultFactory().create(cls);
}
return injector.getInstance(cls);
}
}
25 changes: 25 additions & 0 deletions djobi-cli/src/main/java/io/datatok/djobi/cli/CommandKernel.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
package io.datatok.djobi.cli;

import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import io.datatok.djobi.cli.commands.RootCommand;
import picocli.CommandLine;

@Singleton
public class CommandKernel {

    @Inject
    private CommandFactory commandFactory;

    @Inject
    private Provider<RootCommand> rootCommandProvider;

    /**
     * Build a fresh picocli {@link CommandLine} rooted at a newly provided
     * {@link RootCommand}, wired to the Guice-backed {@link CommandFactory}
     * so subcommands are resolved through the injector.
     */
    public CommandLine getRootCommand() {
        final RootCommand rootCommand = rootCommandProvider.get();

        return new CommandLine(rootCommand, commandFactory);
    }

    /**
     * Convenience entry point: assemble the root command and execute it
     * against the given CLI arguments.
     */
    public void run(String... args) {
        final CommandLine commandLine = getRootCommand();

        commandLine.execute(args);
    }
}
10 changes: 7 additions & 3 deletions djobi-cli/src/main/java/io/datatok/djobi/cli/StdoutReporter.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package io.datatok.djobi.cli;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import io.datatok.djobi.cli.utils.CLIOutUtils;
import io.datatok.djobi.cli.utils.CLIUtils;
import io.datatok.djobi.engine.Job;
Expand All @@ -8,9 +10,8 @@
import io.datatok.djobi.engine.stage.Stage;
import io.datatok.djobi.plugins.report.Reporter;
import io.datatok.djobi.utils.ClassUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;

import javax.inject.Singleton;
import java.io.PrintStream;
import java.util.Map;
import java.util.stream.Collectors;
Expand All @@ -20,11 +21,14 @@
@Singleton
public class StdoutReporter implements Reporter {

@Inject
private CLIUtils cliUtils;

private PrintStream stdoutPrintStream = System.out;

@Override
public void output(String format, Object... args) {
CLIUtils.output(format, args);
cliUtils.output(format, args);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -1,23 +1,20 @@
package io.datatok.djobi.cli.commands;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.datatok.djobi.cli.utils.CLIUtils;
import io.datatok.djobi.cli.utils.PipelineRequestFactory;
import io.datatok.djobi.engine.Job;
import io.datatok.djobi.engine.Pipeline;
import io.datatok.djobi.engine.PipelineExecutionRequest;
import io.datatok.djobi.engine.phases.ActionPhases;
import io.datatok.djobi.engine.phases.StagePhaseMetaData;
import io.datatok.djobi.engine.stage.ActionFactory;
import io.datatok.djobi.loaders.yaml.YAMLPipelineLoader;
import io.datatok.djobi.plugins.report.Reporter;
import io.datatok.djobi.utils.JSONUtils;
import org.apache.commons.lang.StringUtils;
import org.fusesource.jansi.AnsiConsole;
import org.apache.commons.lang3.StringUtils;
import picocli.CommandLine;

import javax.inject.Inject;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;

@CommandLine.Command(name = "pipeline", description = "dump a pipeline")
Expand All @@ -32,6 +29,9 @@ public class DumpPipelineCommand implements Runnable {
@Inject
ActionFactory actionFactory;

@Inject
PipelineRequestFactory pipelineRequestFactory;

@CommandLine.ParentCommand
DumpCommand dumpCommand;

Expand All @@ -48,10 +48,10 @@ public class DumpPipelineCommand implements Runnable {
public void run() {
Pipeline pipeline = null;

AnsiConsole.systemInstall();

try {
pipeline = pipelineLoader.get(PipelineExecutionRequest.build(pipelinePath, args).setJobsFilter(Arrays.asList(jobs.split(","))));
pipeline = pipelineLoader.get(
pipelineRequestFactory.build(pipelinePath, args, null, jobs, "", null)
);
} catch (IOException e) {
e.printStackTrace();
}
Expand Down
Loading

0 comments on commit 9f2a611

Please sign in to comment.