package com.cadrlife.devsearch.agent;
import com.cadrlife.devsearch.agent.indexing.LocalRepoCrawler;
import com.cadrlife.devsearch.agent.service.*;
import com.cadrlife.devsearch.domain.Project;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import org.elasticsearch.client.Client;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.*;
/**
 * Top-level coordinator for the dev-search agent. Wires together the repo
 * crawler (local indexing into Elasticsearch), the source-pull pipeline, the
 * cleanup service, script execution across projects, and push-back to the
 * originating repo. Instances own an Elasticsearch {@link Client} and an
 * executor service; call {@link #close()} when done to release both.
 */
public class Agent implements Closeable {

    private static final Logger LOG = LoggerFactory.getLogger(Agent.class);

    /** Worker threads used by {@link #executeScript}; scripts run concurrently per project. */
    private static final int SCRIPT_THREAD_COUNT = 3;

    private final LocalRepoCrawler crawler;
    private final Client esClient;
    private final CleanService cleanService;
    private final SourcePullService sourcePullService;
    private final Path checkoutRootPath;
    private final RepoCreator repoCreator;
    private final ScriptService scriptService;
    private final ListeningExecutorService executorService;

    @Inject
    public Agent(LocalRepoCrawler crawler, Client esClient,
                 CleanService cleanService, SourcePullService sourcePullService,
                 @Named("checkout.root") Path checkoutRootPath, RepoCreator repoCreator, ScriptService scriptService,
                 ListeningExecutorService executorService
    ) {
        this.esClient = esClient;
        this.crawler = crawler;
        this.cleanService = cleanService;
        this.sourcePullService = sourcePullService;
        this.checkoutRootPath = checkoutRootPath;
        this.repoCreator = repoCreator;
        this.scriptService = scriptService;
        this.executorService = executorService;
    }

    /**
     * Indexes a single project from its local checkout by walking it with the crawler.
     *
     * @param project the project whose local files should be indexed
     * @throws RuntimeException wrapping any {@link IOException} raised during the walk
     */
    public void indexFromLocal(Project project) {
        LOG.info("indexFromLocal {}", project);
        try {
            crawler.walkProject(project);
        } catch (IOException e) {
            // Throwables.propagate is deprecated; IOException is checked, so
            // wrapping in RuntimeException is behavior-equivalent.
            throw new RuntimeException(e);
        }
    }

    /**
     * Pulls all projects in scope from their source repo and blocks until every
     * pull has completed.
     *
     * @param updateScope selects the repo (and optionally project) to pull
     * @throws RuntimeException if any pull fails or the wait is interrupted
     */
    public void pullFromSourceSync(UpdateScope updateScope) {
        Iterable<ListenableFuture<Project>> futures = pullFromSourceAsync(updateScope);
        try {
            Futures.allAsList(futures).get();
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Not all projects succeeded", e);
        } catch (ExecutionException e) {
            throw new RuntimeException("Not all projects succeeded", e);
        }
    }

    /**
     * Kicks off asynchronous pulls of the projects in scope.
     *
     * @param updateScope selects the repo (and optionally project) to pull
     * @return one future per project being pulled
     */
    public Iterable<ListenableFuture<Project>> pullFromSourceAsync(UpdateScope updateScope) {
        LOG.info("pullFromSourceAsync on {}", updateScope);
        RepoService repoService = repoCreator.createService(updateScope.getRepoName());
        LOG.info("Source Type {}", repoService.getSourceType());
        return sourcePullService.pullRepo(repoService, updateScope);
        // TODO
        // updateScope.getAffectedProjects().addAll(projects);
    }

    /**
     * Releases all owned resources: the crawler, the executor service, and the
     * Elasticsearch client. A failure to close the crawler is logged rather than
     * propagated so the remaining resources are still released.
     */
    @Override
    public void close() {
        try {
            crawler.close();
        } catch (IOException e) {
            LOG.error("Couldn't close crawler", e);
        }
        LOG.info("Shutting down executor service");
        executorService.shutdownNow();
        LOG.info("Shutting down Elasticsearch client");
        esClient.close();
    }

    /**
     * Deletes the checkout directory for the scope: the whole repo directory
     * when the scope covers all projects, otherwise just the one project's
     * subdirectory.
     *
     * @param updateScope selects the repo/project checkout to remove
     */
    public void clean(UpdateScope updateScope) {
        if (updateScope.isAllProjects()) {
            cleanService.clean(checkoutRootPath.resolve(updateScope.getRepoName()));
        } else {
            cleanService.clean(checkoutRootPath.resolve(updateScope.getRepoName()).resolve(updateScope.getProjectName()));
        }
    }

    /**
     * Logs the name of every project found in the scope's repo, followed by a
     * total count.
     *
     * @param updateScope names the repo to list
     */
    public void listProjects(UpdateScope updateScope) {
        LOG.info("Listing all projects in {}", updateScope.getRepoName());
        RepoService repoService = repoCreator.createService(updateScope.getRepoName());
        List<Project> projects = repoService.findAllProjects();
        for (Project project : projects) {
            LOG.info(project.getName());
        }
        LOG.info("{} total ", projects.size());
    }

    /**
     * Runs the given script file against every affected project in the scope,
     * using a small fixed thread pool, and blocks until all runs finish.
     * Projects whose script run succeeds are marked update-complete (consumed
     * by {@link #pushToRepo}); failures are logged and do not abort the batch.
     *
     * @param updateScope    supplies the affected projects
     * @param scriptFilePath path to an existing script file
     * @throws IllegalStateException if the script file does not exist
     * @throws RuntimeException      if the wait for completion is interrupted
     */
    public void executeScript(UpdateScope updateScope, final String scriptFilePath) {
        final File scriptFile = new File(scriptFilePath);
        Preconditions.checkState(scriptFile.exists(), "Script file '%s' does not exist", scriptFilePath);
        LOG.info("Using thread count {}", SCRIPT_THREAD_COUNT);
        ExecutorService myExecutorService = Executors.newFixedThreadPool(SCRIPT_THREAD_COUNT);
        for (final Project project : updateScope.getAffectedProjects()) {
            myExecutorService.execute(new Runnable() {
                @Override
                public void run() {
                    LOG.info("Executing script '{}' on project '{}'", scriptFilePath, project.getName());
                    try {
                        scriptService.executeOnProject(scriptFile, project);
                        project.setUpdateComplete(true);
                    } catch (Exception e) {
                        // Log and continue: one project's failure must not stop the others.
                        LOG.error("Failed executing script '{}' on project '{}'", scriptFilePath, project.getName(), e);
                    }
                }
            });
        }
        myExecutorService.shutdown();
        try {
            // Poll until all queued script runs have finished.
            while (!myExecutorService.awaitTermination(10, TimeUnit.SECONDS));
        } catch (InterruptedException e) {
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }

    /**
     * Pushes every affected project whose update completed (see
     * {@link #executeScript}) back to the source repo; projects without a
     * completed update are skipped with a log message.
     *
     * @param updateScope supplies the repo name and affected projects
     */
    public void pushToRepo(UpdateScope updateScope) {
        RepoService repoService = repoCreator.createService(updateScope.getRepoName());
        repoService.setCheckoutRootPath(checkoutRootPath);
        for (Project project : updateScope.getAffectedProjects()) {
            if (project.isUpdateComplete()) {
                LOG.info("Pushing project '{}' to repo", project.getName());
                repoService.pushRepo(project);
            } else {
                LOG.info("No complete updates to push for project '{}'", project.getName());
            }
        }
    }

    /**
     * Verifies Elasticsearch connectivity by issuing a cheap index-exists
     * request (the index name is arbitrary; only reachability matters).
     * Fails with the client's own exception if the cluster is unreachable.
     */
    public void checkElasticsearchClient() {
        LOG.info("Checking that Elasticsearch is available");
        esClient.admin().indices().prepareExists("blah").execute().actionGet();
    }
}