Graphql update #144

Closed · wants to merge 12 commits
2 changes: 1 addition & 1 deletion pom.xml
@@ -353,7 +353,7 @@
<dependency>
<groupId>com.graphql-java</groupId>
<artifactId>graphql-java</artifactId>
<version>4.2</version>
<version>10.0</version>
</dependency>
</dependencies>
</project>
113 changes: 88 additions & 25 deletions src/main/java/com/conveyal/gtfs/graphql/GTFSGraphQL.java
@@ -1,41 +1,104 @@
package com.conveyal.gtfs.graphql;

import com.conveyal.gtfs.graphql.fetchers.JDBCFetcher;
import graphql.ExecutionInput;
import graphql.GraphQL;
import graphql.execution.instrumentation.dataloader.DataLoaderDispatcherInstrumentation;
import graphql.introspection.IntrospectionQuery;
import graphql.schema.DataFetchingEnvironment;
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderRegistry;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;

/**
* This provides a GraphQL API around the gtfs-lib JDBC storage.
* This just makes a Java API with the right schema available, which uses String requests and responses.
* To make this into a web API you need to wrap it in an HTTP framework / server.
*/
import java.util.List;
import java.util.Map;

import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.newJdbcDataLoader;

public class GTFSGraphQL {

private static DataSource dataSource;
/**
* Execute a GraphQL request. This method creates and executes a new GraphQL runner that caches and batches the
* SQL queries issued while resolving the query. The result is a map of objects, which a downstream application
* typically converts to JSON or another output format.
*
* A DataLoader (https://github.com/graphql-java/java-dataloader) handles the batching and caching of requests.
* java-dataloader requires manual dispatching in order to actually execute the data-fetching code. However, the
* helper class DataLoaderDispatcherInstrumentation dispatches the dataloaders automatically. This was set up
* according to this guide: https://www.graphql-java.com/documentation/v10/batching/
*
* @param dataSource The DataSource used to connect to the database and run SQL queries.
* @param query The GraphQL query.
* @param variables The variables to apply to the GraphQL query.
*/
public static Map<String, Object> run (DataSource dataSource, String query, Map<String,Object> variables) {
// The registry and dataloaders must be created anew for each request so that data loaded for one request is never served from cache in a later one.
DataLoaderRegistry registry = new DataLoaderRegistry();
DataLoader<JDBCFetcher.JdbcQuery, List<Map<String, Object>>> jdbcQueryDataLoader = newJdbcDataLoader();
registry.register("jdbcfetcher", jdbcQueryDataLoader);

return GraphQL.newGraphQL(GraphQLGtfsSchema.feedBasedSchema)
// This instrumentation dispatches all the dataloaders as each level of the GraphQL query is executed,
// making the batched results available to the query and its DataFetchers. If the dataloader were reused
// across requests instead of being recreated each time, it could return cached results for a SQL query
// even though the underlying data in the database had changed. Hence the dataloader and registry are
// created fresh for every request.
.instrumentation(new DataLoaderDispatcherInstrumentation(registry))
.build()
// We now have a GraphQL schema with instrumented dataloaders; execute the query.
.execute(
// Build the execution input with a context, the query, and the variables. The context is used
// primarily by the JDBCFetcher to obtain the current loader and the DataSource against which
// the SQL queries are executed.
ExecutionInput.newExecutionInput()
.context(new GraphQLExecutionContext(jdbcQueryDataLoader, dataSource))
.query(query)
.variables(variables)
.build()
)
.toSpecification();
}
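For context, here is a minimal sketch of how a caller (for example, an HTTP handler or a test) might invoke the new entry point; the namespace value and the selected fields are illustrative assumptions, not taken from this diff:

// Hypothetical caller sketch (not part of this diff). The query string and namespace are made up.
import com.conveyal.gtfs.graphql.GTFSGraphQL;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

public class GraphQLRunExample {
    public static Map<String, Object> fetchFeed (DataSource dataSource) {
        String query = "query ($namespace: String) { feed(namespace: $namespace) { feed_id feed_version } }";
        Map<String, Object> variables = new HashMap<>();
        variables.put("namespace", "abcd_1234"); // hypothetical feed namespace
        // run() builds a fresh DataLoader registry internally, so SQL results are batched and
        // cached only within this single GraphQL execution.
        return GTFSGraphQL.run(dataSource, query, variables);
    }
}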

// TODO Is it correct to share one of these objects between many instances? Is it supposed to be long-lived or threadsafe?
// Analysis-backend creates a new GraphQL object on every request.
private static GraphQL GRAPHQL;
public static final Map<String, Object> introspectedSchema = GraphQL.newGraphQL(GraphQLGtfsSchema.feedBasedSchema)
.build()
.execute(IntrospectionQuery.INTROSPECTION_QUERY)
.toSpecification();

/** Username and password can be null if connecting to a local instance with host-based authentication. */
public static void initialize (DataSource dataSource) {
GTFSGraphQL.dataSource = dataSource;
GRAPHQL = GraphQL.newGraphQL(GraphQLGtfsSchema.feedBasedSchema)
.build();
/**
* Helper to obtain the jdbcQueryDataLoader from the execution context. It is critical to use the same dataloader
* that was registered in the DataLoaderRegistry / DataLoaderDispatcherInstrumentation; otherwise the queries will
* never be dispatched.
*/
public static DataLoader<JDBCFetcher.JdbcQuery, List<Map<String, Object>>> getJdbcQueryDataLoaderFromContext (
DataFetchingEnvironment environment
) {
return ((GraphQLExecutionContext)environment.getContext()).jdbcQueryDataLoader;
}

public static Connection getConnection() {
try {
return dataSource.getConnection();
} catch (SQLException ex) {
throw new RuntimeException(ex);
/**
* Helper method to extract the DataSource from the DataFetchingEnvironment. Whenever a SQL query needs to be
* made, this method must be called so that the same DataSource is used throughout a single GraphQL query.
*/
public static DataSource getDataSourceFromContext (DataFetchingEnvironment environment) {
DataSource dataSource = ((GraphQLExecutionContext)environment.getContext()).dataSource;
if (dataSource == null) {
throw new RuntimeException("DataSource is not defined, unable to make sql query!");
}
return dataSource;
}

public static GraphQL getGraphQl () {
return GRAPHQL;
}
/**
* The object used as the DataFetchingEnvironment context in graphQL queries.
*/
private static class GraphQLExecutionContext {
private final DataLoader<JDBCFetcher.JdbcQuery, List<Map<String, Object>>> jdbcQueryDataLoader;
private final DataSource dataSource;

private GraphQLExecutionContext(DataLoader<JDBCFetcher.JdbcQuery, List<Map<String, Object>>> jdbcQueryDataLoader, DataSource dataSource) {
this.jdbcQueryDataLoader = jdbcQueryDataLoader;
this.dataSource = dataSource;
}
}
}
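To make the dispatching requirement concrete, here is a minimal sketch (an assumption, not code from this diff) of a DataFetcher that participates in batching by returning the DataLoader's future; the JdbcQuery construction is left out because its shape is defined in JDBCFetcher:

// Hypothetical sketch, not part of this diff.
import com.conveyal.gtfs.graphql.GTFSGraphQL;
import com.conveyal.gtfs.graphql.fetchers.JDBCFetcher;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import org.dataloader.DataLoader;

import java.util.List;
import java.util.Map;

public class BatchedFetcherSketch implements DataFetcher<Object> {
    // In the real JDBCFetcher the query is built from the environment's arguments; here it is
    // passed in pre-built to keep the sketch small.
    private final JDBCFetcher.JdbcQuery prebuiltQuery;

    public BatchedFetcherSketch (JDBCFetcher.JdbcQuery prebuiltQuery) {
        this.prebuiltQuery = prebuiltQuery;
    }

    @Override
    public Object get (DataFetchingEnvironment environment) {
        // Must be the same loader that run() registered; a loader outside the registry is never dispatched.
        DataLoader<JDBCFetcher.JdbcQuery, List<Map<String, Object>>> loader =
                GTFSGraphQL.getJdbcQueryDataLoaderFromContext(environment);
        // load() only enqueues the key and returns a future; DataLoaderDispatcherInstrumentation
        // dispatches each query level, at which point the batched SQL runs and the future completes.
        return loader.load(prebuiltQuery);
    }
}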
14 changes: 12 additions & 2 deletions src/main/java/com/conveyal/gtfs/graphql/GraphQLGtfsSchema.java
Original file line number Diff line number Diff line change
@@ -24,7 +24,17 @@
import static com.conveyal.gtfs.graphql.GraphQLUtil.multiStringArg;
import static com.conveyal.gtfs.graphql.GraphQLUtil.string;
import static com.conveyal.gtfs.graphql.GraphQLUtil.stringArg;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.*;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.DATE_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.FROM_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.ID_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.LIMIT_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.MAX_LAT;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.MAX_LON;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.MIN_LAT;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.MIN_LON;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.OFFSET_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.SEARCH_ARG;
import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.TO_ARG;
import static graphql.Scalars.GraphQLFloat;
import static graphql.Scalars.GraphQLInt;
import static graphql.Scalars.GraphQLString;
@@ -347,6 +357,7 @@ public class GraphQLGtfsSchema {
.type(new GraphQLList(routeType))
.argument(stringArg("namespace"))
.argument(stringArg(SEARCH_ARG))
.argument(intArg(LIMIT_ARG))
.dataFetcher(new NestedJDBCFetcher(
new JDBCFetcher("pattern_stops", "stop_id", null, false),
new JDBCFetcher("patterns", "pattern_id", null, false),
@@ -783,7 +794,6 @@ public class GraphQLGtfsSchema {
// .mutation(someMutation)
.build();


private static class StringCoercing implements Coercing {
@Override
public Object serialize(Object input) {
19 changes: 16 additions & 3 deletions src/main/java/com/conveyal/gtfs/graphql/GraphQLUtil.java
@@ -1,9 +1,9 @@
package com.conveyal.gtfs.graphql;

import graphql.schema.FieldDataFetcher;
import graphql.schema.GraphQLArgument;
import graphql.schema.GraphQLFieldDefinition;
import graphql.schema.GraphQLList;
import graphql.schema.PropertyDataFetcher;

import static graphql.Scalars.GraphQLFloat;
import static graphql.Scalars.GraphQLInt;
@@ -17,15 +17,15 @@ public static GraphQLFieldDefinition string (String name) {
return newFieldDefinition()
.name(name)
.type(GraphQLString)
.dataFetcher(new FieldDataFetcher(name))
.dataFetcher(new PropertyDataFetcher(name))
.build();
}

public static GraphQLFieldDefinition intt (String name) {
return newFieldDefinition()
.name(name)
.type(GraphQLInt)
.dataFetcher(new FieldDataFetcher(name))
.dataFetcher(new PropertyDataFetcher(name))
.build();
}

@@ -57,4 +57,17 @@ public static GraphQLArgument floatArg (String name) {
.build();
}

/**
* Helper method to build the namespaced table name (namespace.table).
*/
public static String namespacedTableName(String namespace, String tableName) {
return String.format("%s.%s", namespace, tableName);
}

/**
* Helper method to build the fully qualified column name (namespace.table.column).
*/
public static String namespacedTableFieldName(String namespace, String tableName, String tableField) {
return String.format("%s.%s.%s", namespace, tableName, tableField);
}
}
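A quick illustration of what the two new helpers produce (the namespace value is made up):

// Illustrative only; "abcd_1234" is a made-up feed namespace.
import com.conveyal.gtfs.graphql.GraphQLUtil;

public class NamespaceHelperExample {
    public static void main (String[] args) {
        System.out.println(GraphQLUtil.namespacedTableName("abcd_1234", "routes"));
        // -> abcd_1234.routes
        System.out.println(GraphQLUtil.namespacedTableFieldName("abcd_1234", "routes", "route_id"));
        // -> abcd_1234.routes.route_id
    }
}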
@@ -8,6 +8,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
@@ -37,9 +38,10 @@ public Object get(DataFetchingEnvironment environment) {
List<ErrorCount> errorCounts = new ArrayList();
Map<String, Object> parentFeedMap = environment.getSource();
String namespace = (String) parentFeedMap.get("namespace");
DataSource dataSource = GTFSGraphQL.getDataSourceFromContext(environment);
Connection connection = null;
try {
connection = GTFSGraphQL.getConnection();
connection = dataSource.getConnection();
Statement statement = connection.createStatement();
String sql = String.format("select error_type, count(*) from %s.errors group by error_type", namespace);
LOG.info("SQL: {}", sql);
@@ -6,6 +6,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
@@ -14,6 +15,8 @@
import java.util.HashMap;
import java.util.Map;

import static com.conveyal.gtfs.graphql.fetchers.JDBCFetcher.validateNamespace;

/**
* Fetch the summary row for a particular loaded feed, based on its namespace.
* This essentially gets the row from the top-level summary table of all feeds that have been loaded into the database.
@@ -25,11 +28,13 @@ public class FeedFetcher implements DataFetcher {
@Override
public Map<String, Object> get (DataFetchingEnvironment environment) {
String namespace = environment.getArgument("namespace"); // This is the unique table prefix (the "schema").
DataSource dataSource = GTFSGraphQL.getDataSourceFromContext(environment);
validateNamespace(namespace);
StringBuilder sqlBuilder = new StringBuilder();
sqlBuilder.append(String.format("select * from feeds where namespace = '%s'", namespace));
Connection connection = null;
try {
connection = GTFSGraphQL.getConnection();
connection = dataSource.getConnection();
Statement statement = connection.createStatement();
LOG.info("SQL: {}", sqlBuilder.toString());
if (statement.execute(sqlBuilder.toString())) {