From b94769d2519b1c6d2200388948473208a8421758 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 2 Jan 2024 15:06:10 -0500 Subject: [PATCH 001/150] removing the map as with virtual threads it causes heap space issues since virtual threads cause new threads to get created per pipeSize --- .../odstransferservice/service/step/vfs/VfsReader.java | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java index 527ba66a..25f235dd 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java @@ -32,14 +32,12 @@ public class VfsReader extends AbstractItemCountingItemStreamItemReader bufferMap; public VfsReader(AccountEndpointCredential credential, EntityInfo fInfo) { this.setExecutionContextName(ClassUtils.getShortName(VfsReader.class)); this.credential = credential; this.filePartitioner = new FilePartitioner(fInfo.getChunkSize()); this.fileInfo = fInfo; - bufferMap = new ConcurrentHashMap<>(); } @BeforeStep @@ -57,11 +55,7 @@ protected DataChunk doRead() { if (chunkParameters == null) return null;// done as there are no more FileParts in the queue logger.info("currently reading {}", chunkParameters); int totalBytes = 0; - ByteBuffer buffer = bufferMap.get(Thread.currentThread().getId()); - if (buffer == null) { - buffer = ByteBuffer.allocate(chunkParameters.getSize()); - bufferMap.put(Thread.currentThread().getId(), buffer); - } + ByteBuffer buffer = ByteBuffer.allocate(this.fileInfo.getChunkSize()); while (totalBytes < chunkParameters.getSize()) { int bytesRead = 0; try { @@ -99,6 +93,5 @@ protected void doClose() { logger.error("Not able to close the input Stream"); ex.printStackTrace(); } - this.bufferMap.clear(); } } From 5baa892b63d65aeac0dbc18bf56b35c465817b53 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 2 Jan 2024 15:31:57 -0500 Subject: [PATCH 002/150] changed to file to have dynamic spring profiles --- Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 54411a54..a8f0990b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -48,7 +48,6 @@ ENV PMETER_CRON_EXP="*/15 * * * * *" ENV OPTIMIZER_URL="${OPTIMIZER_URL}" ENV OPTIMIZER_ENABLE="${OPTIMIZER_ENABLE}" -ENV SPRING_PROFILE="${SPRING_PROFILE:-hsql}" ENV PATH "/home/ods/.local/bin:${PATH}" @@ -56,4 +55,4 @@ RUN mkdir -p $HOME/.pmeter/ RUN touch $HOME/.pmeter/transfer_service_pmeter_measure.txt EXPOSE 8092 -ENTRYPOINT ["java", "-Dspring.profiles.active=hsql","-jar", "/usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar"] \ No newline at end of file +ENTRYPOINT ["java","-jar", "/usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar"] \ No newline at end of file From c6fa7872345f3c6725f7af62de477f78c53f5d78 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 2 Jan 2024 23:19:33 -0500 Subject: [PATCH 003/150] Found a few small bugs that were odd, also made the entityInfo give out json strings --- .../odstransferservice/model/EntityInfo.java | 12 ++++++++++++ .../odstransferservice/service/JobParamService.java | 6 ++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java index db6079ca..cefece32 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java @@ -1,5 +1,7 @@ package org.onedatashare.transferservice.odstransferservice.model; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -12,4 +14,14 @@ public class EntityInfo { private String path; private long size; private int chunkSize; + + @Override + public String toString(){ + ObjectMapper objectMapper = new ObjectMapper(); + try { + return objectMapper.writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java index 2f52c965..6203ec4e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java @@ -10,6 +10,7 @@ import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.net.URI; @@ -22,7 +23,8 @@ public class JobParamService { Logger logger = LoggerFactory.getLogger(JobParamService.class); - + @Value("${spring.application.name}") + private String appName; /** * Here we are adding basically the whole request except for sensitive credentials to the Job Params table. 
* B/C we do not add @@ -52,7 +54,7 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest builder.addLong(PIPELINING, (long) request.getOptions().getPipeSize()); builder.addString(COMPRESS, String.valueOf(request.getOptions().getCompress())); builder.addLong(RETRY, (long) request.getOptions().getRetry()); - builder.addString(APP_NAME, System.getenv("APP_NAME")); + builder.addString(APP_NAME, this.appName); builder.addString(OPTIMIZER, request.getOptions().getOptimizer()); builder.addLong(FILE_COUNT, (long) request.getSource().getInfoList().size()); long totalSize = 0L; From 7834d4c26d8a417a156f713efc78882ec0e4f996 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 3 Jan 2024 13:25:39 -0500 Subject: [PATCH 004/150] Forgot to remove timestamps in the basewriter minimal change --- .../service/step/ODSBaseWriter.java | 14 ++++++++------ .../service/step/vfs/VfsReader.java | 1 - 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java index d189f614..be8ec4db 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java @@ -36,7 +36,7 @@ public ODSBaseWriter(MetricsCollector metricsCollector, InfluxCache influxCache) @BeforeWrite public void beforeWrite() { LocalDateTime startWriteTime = LocalDateTime.now(); - this.writeStartTimes.put(Thread.currentThread().getId(), startWriteTime); + this.writeStartTimes.put(Thread.currentThread().threadId(), startWriteTime); } @AfterWrite @@ -44,26 +44,28 @@ public void afterWrite(Chunk chunk) { List items = chunk.getItems(); LocalDateTime writeEndTime = LocalDateTime.now(); long totalBytes = items.stream().mapToLong(DataChunk::getSize).sum(); - LocalDateTime writeStartTime = this.writeStartTimes.get(Thread.currentThread().getId()); + long threadId = Thread.currentThread().threadId(); + LocalDateTime writeStartTime = this.writeStartTimes.remove(threadId); //this is a cache for the optimizer directly in. 
This i actually think should be deleted and all data querying maybe ideally is done through the monitoring interface - influxCache.addMetric(Thread.currentThread().getId(), stepExecution, totalBytes, writeStartTime, writeEndTime, InfluxCache.ThroughputType.WRITER, items.get(0).getSize()); + influxCache.addMetric(threadId, stepExecution, totalBytes, writeStartTime, writeEndTime, InfluxCache.ThroughputType.WRITER, items.get(0).getSize()); } @BeforeRead public void beforeRead() { LocalDateTime startReadTime = LocalDateTime.now(); - this.readStartTimes.put(Thread.currentThread().getId(), startReadTime); + this.readStartTimes.put(Thread.currentThread().threadId(), startReadTime); } @AfterRead public void afterRead(DataChunk item) { LocalDateTime endTime = LocalDateTime.now(); + long threadId = Thread.currentThread().threadId(); if (item == null) { return; } - LocalDateTime readStartTime = this.readStartTimes.get(Thread.currentThread().getId()); + LocalDateTime readStartTime = this.readStartTimes.remove(threadId); if (readStartTime == null) return; - influxCache.addMetric(Thread.currentThread().getId(), stepExecution, item.getSize(), readStartTime, endTime, InfluxCache.ThroughputType.READER, item.getSize()); + influxCache.addMetric(threadId, stepExecution, item.getSize(), readStartTime, endTime, InfluxCache.ThroughputType.READER, item.getSize()); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java index 89e7b0db..25f235dd 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java @@ -13,7 +13,6 @@ import org.springframework.batch.core.annotation.BeforeStep; import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; import org.springframework.util.ClassUtils; -import sun.misc.Unsafe; import java.io.IOException; import java.nio.ByteBuffer; From 5b439da4425fbcc96fa9e95ab36552c5dc1d685d Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 17 Jan 2024 22:09:31 -0500 Subject: [PATCH 005/150] Aright im making sure the parallelism change is reflected correctly --- .../pools/ThreadPoolManager.java | 71 +------------------ 1 file changed, 1 insertion(+), 70 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java index 74c32d00..c25ff34f 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java @@ -4,7 +4,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; import org.springframework.stereotype.Service; import java.util.HashMap; @@ -17,30 +16,11 @@ public class ThreadPoolManager { @Getter HashMap executorHashmap; - HashMap platformThreadMap; Logger logger = LoggerFactory.getLogger(ThreadPoolManager.class); public ThreadPoolManager() { this.executorHashmap = new HashMap<>(); - this.platformThreadMap = new HashMap<>(); - } - - public ThreadPoolTaskExecutor createPlatformThreads(int corePoolSize, String prefix) { - 
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); - executor.setPrestartAllCoreThreads(true); -// executor.setQueueCapacity(1); - executor.setAllowCoreThreadTimeOut(false); - executor.setCorePoolSize(corePoolSize); -// executor.setMaxPoolSize(corePoolSize); - executor.setThreadNamePrefix(prefix); - executor.initialize(); - if (this.executorHashmap == null) { - this.executorHashmap = new HashMap<>(); - } - logger.info("Created ThreadPoolTaskExecutor: Prefix:{} with size:{}", prefix, corePoolSize); - this.platformThreadMap.put(prefix, executor); - return executor; } public SimpleAsyncTaskExecutor createVirtualThreadExecutor(int corePoolSize, String prefix) { @@ -73,52 +53,23 @@ public void applyOptimizer(int concurrency, int parallel) { if (key.contains(PARALLEL_POOL_PREFIX)) { logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), parallel); if (parallel > 0 && parallel != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(parallel); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); - } - } - } - - for (String key : this.platformThreadMap.keySet()) { - ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); - if (key.contains(STEP_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getCorePoolSize(), concurrency); - if (concurrency > 0 && concurrency != pool.getCorePoolSize()) { - pool.setCorePoolSize(concurrency); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); - } - } - if (key.contains(PARALLEL_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getCorePoolSize(), parallel); - if (parallel > 0 && parallel != pool.getCorePoolSize()) { - pool.setCorePoolSize(parallel); + pool.setConcurrencyLimit(parallel * concurrency); logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); - } } } } public void clearJobPool() { - for (String key : this.platformThreadMap.keySet()) { - ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); - pool.shutdown(); - logger.info("Shutting ThreadPoolTaskExecutor down {}", pool.getThreadNamePrefix()); - } for (String key : this.executorHashmap.keySet()) { SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); pool.close(); logger.info("Shutting SimpleAsyncTaskExec down {}", pool.getThreadNamePrefix()); } this.executorHashmap.clear(); - this.platformThreadMap.clear(); logger.info("Cleared all thread pools"); } - // public SimpleAsyncTaskExecutor sequentialThreadPool() { -// return this.createVirtualThreadExecutor(1, SEQUENTIAL_POOL_PREFIX); -// } -// public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { SimpleAsyncTaskExecutor te = this.executorHashmap.get(STEP_POOL_PREFIX); if (te == null) { @@ -127,14 +78,6 @@ public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { return te; } - public ThreadPoolTaskExecutor stepTaskExecutorPlatform(int threadCount) { - ThreadPoolTaskExecutor te = this.platformThreadMap.get(STEP_POOL_PREFIX); - if (te == null) { - return this.createPlatformThreads(threadCount, STEP_POOL_PREFIX); - } - return te; - } - public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount, String fileName) { SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX); if (te == null) { @@ -143,10 +86,6 @@ public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount, String return te; } - public 
ThreadPoolTaskExecutor parallelThreadPoolPlatform(int threadCount, String fileName) { - return this.createPlatformThreads(threadCount, new StringBuilder().append(fileName).append("-").append(PARALLEL_POOL_PREFIX).toString()); - } - public Integer concurrencyCount() { SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(STEP_POOL_PREFIX); if (threadPoolManager == null) { @@ -165,14 +104,6 @@ public Integer parallelismCount() { } } } - for (String key : this.platformThreadMap.keySet()) { - if (key.contains(PARALLEL_POOL_PREFIX)) { - parallelism = this.platformThreadMap.get(key).getCorePoolSize(); - if (parallelism > 0) { - return parallelism; - } - } - } return parallelism; } From fdfba0408972577aac98e68c0d0c164a057b1a06 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 17 Jan 2024 22:15:44 -0500 Subject: [PATCH 006/150] Using many pools just to try --- .../odstransferservice/pools/ThreadPoolManager.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java index c25ff34f..966296b3 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java @@ -53,7 +53,7 @@ public void applyOptimizer(int concurrency, int parallel) { if (key.contains(PARALLEL_POOL_PREFIX)) { logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), parallel); if (parallel > 0 && parallel != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(parallel * concurrency); + pool.setConcurrencyLimit(parallel); logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); } } @@ -79,9 +79,9 @@ public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { } public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount, String fileName) { - SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX + fileName); if (te == null) { - te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX); + te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX + fileName); } return te; } From 2a5776d123658fa7bdcb104ccb192a7db33329c2 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 17 Jan 2024 22:45:36 -0500 Subject: [PATCH 007/150] Aright this should be ideal now --- .../pools/ThreadPoolManager.java | 44 ++++++++----------- .../service/JobControl.java | 2 +- 2 files changed, 19 insertions(+), 27 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java index 966296b3..6b7f0dad 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java @@ -41,21 +41,18 @@ public SimpleAsyncTaskExecutor createVirtualThreadExecutor(int corePoolSize, Str * @param parallel */ public void applyOptimizer(int concurrency, int parallel) { - for (String key : this.executorHashmap.keySet()) { - SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); - if 
(key.contains(STEP_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), concurrency); - if (concurrency > 0 && concurrency != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(concurrency); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); - } + SimpleAsyncTaskExecutor stepPool = this.executorHashmap.get(STEP_POOL_PREFIX); + if (stepPool != null) { + if (concurrency > 0 && concurrency != stepPool.getConcurrencyLimit()) { + stepPool.setConcurrencyLimit(concurrency); + logger.info("Set {} pool size to {}", stepPool.getThreadNamePrefix(), concurrency); } - if (key.contains(PARALLEL_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), parallel); - if (parallel > 0 && parallel != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(parallel); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); - } + } + SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + if (parallelPool != null) { + if (parallel > 0 && parallel != parallelPool.getConcurrencyLimit()) { + parallelPool.setConcurrencyLimit(parallel * concurrency); + logger.info("Set {} pool size to {}", parallelPool.getThreadNamePrefix(), parallel); } } } @@ -78,10 +75,10 @@ public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { return te; } - public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount, String fileName) { - SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX + fileName); + public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount) { + SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX); if (te == null) { - te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX + fileName); + te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX); } return te; } @@ -95,16 +92,11 @@ public Integer concurrencyCount() { } public Integer parallelismCount() { - int parallelism = 0; - for (String key : this.executorHashmap.keySet()) { - if (key.contains(PARALLEL_POOL_PREFIX)) { - parallelism = this.executorHashmap.get(key).getConcurrencyLimit(); - if (parallelism > 0) { - return parallelism; - } - } + SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + if (threadPoolManager == null) { + return 0; } - return parallelism; + return threadPoolManager.getConcurrencyLimit(); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index b4e19ce4..50b9bca6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -108,7 +108,7 @@ private List createConcurrentFlow(List infoList, String basePa .reader(getRightReader(request.getSource().getType(), file)) .writer(getRightWriter(request.getDestination().getType(), file)); if (this.request.getOptions().getParallelThreadCount() > 0) { - stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount() * request.getOptions().getConcurrencyThreadCount(), file.getPath())); + 
stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount())); } stepBuilder.throttleLimit(64); return new FlowBuilder(basePath + idForStep) From 859907c8ff8d2a8832ba8961052a36db8000a3af Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 17 Jan 2024 22:46:27 -0500 Subject: [PATCH 008/150] Aright this should be it --- .../transferservice/odstransferservice/service/JobControl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index 50b9bca6..bf27f6df 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -108,7 +108,7 @@ private List createConcurrentFlow(List infoList, String basePa .reader(getRightReader(request.getSource().getType(), file)) .writer(getRightWriter(request.getDestination().getType(), file)); if (this.request.getOptions().getParallelThreadCount() > 0) { - stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount())); + stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount() * request.getOptions().getConcurrencyThreadCount())); } stepBuilder.throttleLimit(64); return new FlowBuilder(basePath + idForStep) From 4d1e1757d18cd48e6946b864dd1092fc2020e967 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Thu, 18 Jan 2024 14:12:43 -0500 Subject: [PATCH 009/150] Aright made virtual and platform threads into spring profiles and fully configurable bc i was tired of having this self argument of which is better now we just support both --- .../consumer/RabbitMQConsumer.java | 10 +- .../pools/ThreadPoolContract.java | 13 +++ .../pools/ThreadPoolManagerPlatform.java | 107 ++++++++++++++++++ ...ger.java => ThreadPoolManagerVirtual.java} | 66 ++++++----- .../service/InfluxCache.java | 12 +- .../service/JobControl.java | 8 +- .../listner/JobCompletionListener.java | 10 +- .../resources/application-platform.properties | 1 + .../resources/application-virtual.properties | 1 + src/main/resources/application.properties | 1 - 10 files changed, 181 insertions(+), 48 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java rename src/main/java/org/onedatashare/transferservice/odstransferservice/pools/{ThreadPoolManager.java => ThreadPoolManagerVirtual.java} (56%) create mode 100644 src/main/resources/application-platform.properties create mode 100644 src/main/resources/application-virtual.properties diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 46118159..533a3d65 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import 
org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.onedatashare.transferservice.odstransferservice.service.JobParamService; import org.onedatashare.transferservice.odstransferservice.service.VfsExpander; @@ -29,7 +29,7 @@ public class RabbitMQConsumer { private final ObjectMapper objectMapper; - private final ThreadPoolManager threadPoolManager; + private final ThreadPoolContract threadPool; Logger logger = LoggerFactory.getLogger(RabbitMQConsumer.class); JobControl jc; @@ -42,7 +42,7 @@ public class RabbitMQConsumer { VfsExpander vfsExpander; - public RabbitMQConsumer(VfsExpander vfsExpander, Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolManager threadPoolManager) { + public RabbitMQConsumer(VfsExpander vfsExpander, Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool) { this.vfsExpander = vfsExpander; this.userQueue = userQueue; this.jobParamService = jobParamService; @@ -51,7 +51,7 @@ public RabbitMQConsumer(VfsExpander vfsExpander, Queue userQueue, JobParamServic this.objectMapper = new ObjectMapper(); this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); - this.threadPoolManager = threadPoolManager; + this.threadPool = threadPool; } @RabbitListener(queues = "#{userQueue}") @@ -78,7 +78,7 @@ public void consumeDefaultMessage(final Message message) { try { TransferApplicationParams params = objectMapper.readValue(jsonStr, TransferApplicationParams.class); logger.info("Parsed TransferApplicationParams: {}", params); - this.threadPoolManager.applyOptimizer(params.getConcurrency(), params.getParallelism()); + this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); } catch (Exception e) { logger.error("Did not apply transfer params due to parsing message failure"); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java new file mode 100644 index 00000000..04886902 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java @@ -0,0 +1,13 @@ +package org.onedatashare.transferservice.odstransferservice.pools; + +import org.springframework.core.task.TaskExecutor; + +public interface ThreadPoolContract { + public TaskExecutor createExecutor(int threadCount, String prefix); + public void applyOptimizer(int concurrency, int parallelism); + public void clearPools(); + public int concurrencyCount(); + public int parallelismCount(); + public TaskExecutor stepPool(int threadCount); + public TaskExecutor parallelPool(int threadCount, String filePath); +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java new file mode 100644 index 00000000..68eeb83d --- /dev/null +++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -0,0 +1,107 @@ +package org.onedatashare.transferservice.odstransferservice.pools; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.stereotype.Service; + +import java.util.HashMap; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.PARALLEL_POOL_PREFIX; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.STEP_POOL_PREFIX; + +@Service("threadPool") +@Profile("platform") +public class ThreadPoolManagerPlatform implements ThreadPoolContract { + HashMap platformThreadMap; + Logger logger = LoggerFactory.getLogger(ThreadPoolManagerPlatform.class); + + public ThreadPoolManagerPlatform() { + this.platformThreadMap = new HashMap<>(); + } + + @Override + public ThreadPoolTaskExecutor createExecutor(int threadCount, String prefix) { + ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); + executor.setAllowCoreThreadTimeOut(false); + executor.setCorePoolSize(threadCount); + executor.setThreadNamePrefix(prefix); + executor.initialize(); + if (this.platformThreadMap == null) { + this.platformThreadMap = new HashMap<>(); + } + logger.info("Created ThreadPoolTaskExecutor: Prefix:{} with size:{}", prefix, threadCount); + this.platformThreadMap.put(prefix, executor); + return executor; + } + + @Override + public void applyOptimizer(int concurrency, int parallelism) { + for (String key : this.platformThreadMap.keySet()) { + ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); + if (key.contains(STEP_POOL_PREFIX)) { + if (concurrency > 0 && concurrency != pool.getPoolSize()) { + pool.setCorePoolSize(concurrency); + logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); + } + } + if (key.contains(PARALLEL_POOL_PREFIX)) { + logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getPoolSize(), parallelism); + if (parallelism > 0 && parallelism != pool.getPoolSize()) { + pool.setCorePoolSize(parallelism); + logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallelism); + } + } + } + } + + @Override + public void clearPools() { + for (String key : this.platformThreadMap.keySet()) { + ThreadPoolTaskExecutor pe = this.platformThreadMap.get(key); + pe.shutdown(); + } + this.platformThreadMap.clear(); + } + + @Override + public int concurrencyCount() { + ThreadPoolTaskExecutor pe = this.platformThreadMap.get(STEP_POOL_PREFIX); + if (pe == null) { + return 0; + } + return pe.getCorePoolSize(); + } + + @Override + public int parallelismCount() { + for (String key : this.platformThreadMap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + ThreadPoolTaskExecutor threadPoolManager = this.platformThreadMap.get(PARALLEL_POOL_PREFIX); + if (threadPoolManager != null) { + return threadPoolManager.getCorePoolSize(); + } + } + } + return 0; + } + + @Override + public ThreadPoolTaskExecutor stepPool(int threadCount) { + ThreadPoolTaskExecutor te = this.platformThreadMap.get(STEP_POOL_PREFIX); + if (te == null) { + return this.createExecutor(threadCount, STEP_POOL_PREFIX); + } + return te; + } + + @Override + public ThreadPoolTaskExecutor parallelPool(int threadCount, String filePath) { + ThreadPoolTaskExecutor te = this.platformThreadMap.get(PARALLEL_POOL_PREFIX + filePath); + if (te 
== null) { + te = this.createExecutor(threadCount, PARALLEL_POOL_PREFIX + filePath); + } + return te; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java similarity index 56% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java index 6b7f0dad..7392ae60 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java @@ -1,8 +1,8 @@ package org.onedatashare.transferservice.odstransferservice.pools; -import lombok.Getter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.stereotype.Service; @@ -11,27 +11,29 @@ import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.PARALLEL_POOL_PREFIX; import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.STEP_POOL_PREFIX; -@Service -public class ThreadPoolManager { +@Service("threadPool") +@Profile("virtual") +public class ThreadPoolManagerVirtual implements ThreadPoolContract { - @Getter HashMap executorHashmap; - Logger logger = LoggerFactory.getLogger(ThreadPoolManager.class); + Logger logger = LoggerFactory.getLogger(ThreadPoolManagerVirtual.class); - public ThreadPoolManager() { + public ThreadPoolManagerVirtual() { this.executorHashmap = new HashMap<>(); } - public SimpleAsyncTaskExecutor createVirtualThreadExecutor(int corePoolSize, String prefix) { + + @Override + public SimpleAsyncTaskExecutor createExecutor(int threadCount, String prefix) { SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); executor.setThreadNamePrefix(prefix); executor.setVirtualThreads(true); - executor.setConcurrencyLimit(corePoolSize); + executor.setConcurrencyLimit(threadCount); if (this.executorHashmap == null) { this.executorHashmap = new HashMap<>(); } - logger.info("Created a SimpleAsyncTaskExecutor: Prefix:{} with size:{}", prefix, corePoolSize); + logger.info("Created a SimpleAsyncTaskExecutor: Prefix:{} with size:{}", prefix, threadCount); this.executorHashmap.put(prefix, executor); return executor; } @@ -48,16 +50,21 @@ public void applyOptimizer(int concurrency, int parallel) { logger.info("Set {} pool size to {}", stepPool.getThreadNamePrefix(), concurrency); } } - SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(PARALLEL_POOL_PREFIX); - if (parallelPool != null) { - if (parallel > 0 && parallel != parallelPool.getConcurrencyLimit()) { - parallelPool.setConcurrencyLimit(parallel * concurrency); - logger.info("Set {} pool size to {}", parallelPool.getThreadNamePrefix(), parallel); + for (String key : this.executorHashmap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + if (parallelPool != null) { + if (parallel > 0 && parallel != parallelPool.getConcurrencyLimit()) { + parallelPool.setConcurrencyLimit(parallel); + logger.info("Set {} pool size to {}", parallelPool.getThreadNamePrefix(), parallel); + } + } } } } - public void clearJobPool() { + 
@Override + public void clearPools() { for (String key : this.executorHashmap.keySet()) { SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); pool.close(); @@ -67,23 +74,25 @@ public void clearJobPool() { logger.info("Cleared all thread pools"); } - public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { + @Override + public SimpleAsyncTaskExecutor stepPool(int threadCount) { SimpleAsyncTaskExecutor te = this.executorHashmap.get(STEP_POOL_PREFIX); if (te == null) { - return this.createVirtualThreadExecutor(threadCount, STEP_POOL_PREFIX); + return this.createExecutor(threadCount, STEP_POOL_PREFIX); } return te; } - public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount) { - SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + @Override + public SimpleAsyncTaskExecutor parallelPool(int threadCount, String filePath) { + SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX + filePath); if (te == null) { - te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX); + te = this.createExecutor(threadCount, PARALLEL_POOL_PREFIX + filePath); } return te; } - public Integer concurrencyCount() { + public int concurrencyCount() { SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(STEP_POOL_PREFIX); if (threadPoolManager == null) { return 0; @@ -91,12 +100,15 @@ public Integer concurrencyCount() { return threadPoolManager.getConcurrencyLimit(); } - public Integer parallelismCount() { - SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(PARALLEL_POOL_PREFIX); - if (threadPoolManager == null) { - return 0; + public int parallelismCount() { + for (String key : this.executorHashmap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + if (threadPoolManager != null) { + return threadPoolManager.getConcurrencyLimit(); + } + } } - return threadPoolManager.getConcurrencyLimit(); + return 0; } - } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java index c314dc35..705a2948 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java @@ -2,7 +2,7 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.JobMetric; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepExecution; @@ -26,7 +26,7 @@ @Service public class InfluxCache { - private final ThreadPoolManager threadPoolManager; + private final ThreadPoolContract threadPool; public ConcurrentHashMap threadCache; //stores a JobMetric that represents everything that thread has processed for the step. 
Thus each JobMetric is an aggregate of what has happened Logger logger = LoggerFactory.getLogger(InfluxCache.class); @@ -36,8 +36,8 @@ public enum ThroughputType { WRITER } - public InfluxCache(ThreadPoolManager threadPoolManager) { - this.threadPoolManager = threadPoolManager; + public InfluxCache(ThreadPoolContract threadPool) { + this.threadPool = threadPool; this.threadCache = new ConcurrentHashMap<>(); } @@ -47,8 +47,8 @@ public void addMetric(long threadId, StepExecution stepExecution, long totalByte prevMetric = new JobMetric(); prevMetric.setThreadId(threadId); prevMetric.setStepExecution(stepExecution); - prevMetric.setConcurrency(this.threadPoolManager.concurrencyCount()); - prevMetric.setParallelism(this.threadPoolManager.parallelismCount()); + prevMetric.setConcurrency(this.threadPool.concurrencyCount()); + prevMetric.setParallelism(this.threadPool.parallelismCount()); prevMetric.setPipelining(stepExecution.getJobParameters().getLong(PIPELINING).intValue()); prevMetric.setChunkSize(chunkSize); this.threadCache.put(threadId, prevMetric); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index bf27f6df..5135ecf6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -7,7 +7,7 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.listner.JobCompletionListener; @@ -88,7 +88,7 @@ public class JobControl { InfluxIOService influxIOService; @Autowired - ThreadPoolManager threadPoolManager; + ThreadPoolContract threadPool; private List createConcurrentFlow(List infoList, String basePath) { if (this.request.getSource().getType().equals(EndpointType.vfs)) { @@ -108,7 +108,7 @@ private List createConcurrentFlow(List infoList, String basePa .reader(getRightReader(request.getSource().getType(), file)) .writer(getRightWriter(request.getDestination().getType(), file)); if (this.request.getOptions().getParallelThreadCount() > 0) { - stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount() * request.getOptions().getConcurrencyThreadCount())); + stepBuilder.taskExecutor(threadPool.parallelPool(request.getOptions().getParallelThreadCount(), file.getPath())); } stepBuilder.throttleLimit(64); return new FlowBuilder(basePath + idForStep) @@ -214,7 +214,7 @@ public Job concurrentJobDefinition() { Flow[] fl = new Flow[flows.size()]; Flow f = new FlowBuilder("splitFlow") // .split(this.threadPoolManager.stepTaskExecutorVirtual(this.request.getOptions().getConcurrencyThreadCount())) - .split(this.threadPoolManager.stepTaskExecutorVirtual(this.request.getOptions().getConcurrencyThreadCount())) + 
.split(this.threadPool.stepPool(this.request.getOptions().getConcurrencyThreadCount())) .add(flows.toArray(fl)) .build(); return jobBuilder diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java index a974a836..5b6228b2 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java @@ -3,7 +3,7 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerCreateRequest; import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; @@ -26,7 +26,7 @@ @Service public class JobCompletionListener implements JobExecutionListener { - private final ThreadPoolManager threadPoolManager; + private final ThreadPoolContract threadPool; private Set jobIds; Logger logger = LoggerFactory.getLogger(JobCompletionListener.class); @@ -53,12 +53,12 @@ public class JobCompletionListener implements JobExecutionListener { @Autowired Environment environment; - public JobCompletionListener(OptimizerService optimizerService, MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolManager threadPoolManager, Set jobIds) { + public JobCompletionListener(OptimizerService optimizerService, MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { this.optimizerService = optimizerService; this.metricsCollector = metricsCollector; this.connectionBag = connectionBag; this.optimizerEnable = false; - this.threadPoolManager = threadPoolManager; + this.threadPool = threadPool; this.jobIds = jobIds; } @@ -91,7 +91,7 @@ public void afterJob(JobExecution jobExecution) { this.optimizerService.deleteOptimizerBlocking(new OptimizerDeleteRequest(appName)); this.optimizerEnable = false; } - this.threadPoolManager.clearJobPool(); + this.threadPool.clearPools(); System.gc(); } } diff --git a/src/main/resources/application-platform.properties b/src/main/resources/application-platform.properties new file mode 100644 index 00000000..d0122620 --- /dev/null +++ b/src/main/resources/application-platform.properties @@ -0,0 +1 @@ +spring.threads.virtual.enabled=false \ No newline at end of file diff --git a/src/main/resources/application-virtual.properties b/src/main/resources/application-virtual.properties new file mode 100644 index 00000000..0daaddc4 --- /dev/null +++ b/src/main/resources/application-virtual.properties @@ -0,0 +1 @@ +spring.threads.virtual.enabled=true \ No newline at end of file diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index a8651907..e60db0fd 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -3,7 +3,6 @@ ods.user=${USER_NAME} 
server.port=8092 -spring.threads.virtual.enabled=true spring.main.allow-bean-definition-overriding=true #Eureka config eureka.client.enabled=true From 0c2fbb024d77c22bfc93f7a9363bef7c9e902ae8 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 14:05:44 -0500 Subject: [PATCH 010/150] Added carbon score concept which is on pmeter version 1.0.11 --- .../constant/ODSConstants.java | 2 ++ .../model/metrics/CarbonScore.java | 14 ++++++++++ .../service/JobParamService.java | 18 ++++++++++--- .../service/PmeterParser.java | 27 +++++++++++++++++++ src/main/resources/application.properties | 1 + 5 files changed, 59 insertions(+), 3 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java index b91d561f..a9088f41 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java @@ -6,6 +6,7 @@ public class ODSConstants { public static final String TIME = "time"; public static final String SOURCE_HOST = "sourceURI"; public static final String SOURCE_PORT = "sourcePort"; + public static final String CARBON_SCORE_SOURCE = "sourceCarbonScore"; public static final String SOURCE_BASE_PATH = "sourceBasePath"; public static final String DEST_BASE_PATH = "destBasePath"; public static final String FILE_COUNT = "fileCount"; @@ -15,6 +16,7 @@ public class ODSConstants { public static final String DEST_CREDENTIAL_TYPE = "destCredentialType"; public static final String DEST_HOST = "destURI"; public static final String DEST_PORT = "destPort"; + public static final String CARBON_SCORE_DEST = "destCarbonScore"; public static final String CHUNK_SIZE = "chunkSize"; public static final String JOB_UUID = "jobUuid"; public static final String OWNER_ID = "ownerId"; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java new file mode 100644 index 00000000..0ad44096 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java @@ -0,0 +1,14 @@ +package org.onedatashare.transferservice.odstransferservice.model.metrics; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import lombok.Data; + +@Data +@JsonIgnoreProperties(ignoreUnknown = true) +public class CarbonScore { + public int avgCarbon; + + public CarbonScore(){ + this.avgCarbon = 0; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java index 6203ec4e..3e3cb43c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java @@ -5,11 +5,13 @@ import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; +import 
org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; import org.onedatashare.transferservice.odstransferservice.utility.S3Utility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; @@ -25,6 +27,9 @@ public class JobParamService { @Value("${spring.application.name}") private String appName; + + @Autowired + PmeterParser pmeterParser; /** * Here we are adding basically the whole request except for sensitive credentials to the Job Params table. * B/C we do not add @@ -71,20 +76,27 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest //adding the source host and source port to use for RTT & Latency measurements. if (request.getSource().getVfsSourceCredential() != null) { - builder.addString(SOURCE_HOST, this.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); + String sourceIp = this.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType); + builder.addString(SOURCE_HOST, sourceIp); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); + CarbonScore score = this.pmeterParser.runCarbonPmeter(sourceIp); + logger.info("Source Carbon Score: {}", score.avgCarbon); + builder.addLong(CARBON_SCORE_SOURCE, (long) score.avgCarbon); } else if (request.getSource().getOauthSourceCredential() != null) { builder.addString(SOURCE_HOST, this.uriFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); } if (request.getDestination().getVfsDestCredential() != null) { - builder.addString(DEST_HOST, this.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); + String destIp = this.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType); + builder.addString(DEST_HOST, destIp); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); + CarbonScore score = this.pmeterParser.runCarbonPmeter(destIp); + logger.info("Destination Carbon Score: {}", score.avgCarbon); + builder.addLong(CARBON_SCORE_DEST, (long)score.avgCarbon); } else if (request.getDestination().getOauthDestCredential() != null) { builder.addString(DEST_HOST, this.uriFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); } - return builder.toJobParameters(); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 07c534d4..1e247cfa 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -6,6 +6,7 @@ import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.PumpStreamHandler; 
+import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +32,8 @@ public class PmeterParser { Logger logger = LoggerFactory.getLogger(PmeterParser.class); + @Value("${pmeter.carbon.path}") + String pmeterCarbonPath; @Value("${pmeter.report.path}") String pmeterReportPath; @@ -92,4 +95,28 @@ public List parsePmeterOutput() throws IOException { path.toFile().createNewFile(); return ret; } + + public CarbonScore runCarbonPmeter(String ip){ + //pmeter carbon 129.114.108.45 + CommandLine carbonCmd= CommandLine.parse(String.format("pmeter carbon %s", ip)); + try { + executor.execute(carbonCmd); + } catch (IOException e) { + return new CarbonScore(); + } + try { + Path filePath = Paths.get(this.pmeterCarbonPath); + List lines = Files.readAllLines(filePath); + CarbonScore score = new CarbonScore(); + for(String line: lines){ + score = this.pmeterMapper.readValue(line, CarbonScore.class); + break; + } + filePath.toFile().delete(); + filePath.toFile().createNewFile(); + return score; + } catch (IOException e) { + return new CarbonScore(); + } + } } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index e60db0fd..2ca52395 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -43,6 +43,7 @@ pmeter.interface=${PMETER_NIC_INTERFACE:awdl0} pmeter.measure=${PMETER_MEASURE:1} pmeter.options=${PMETER_CLI_OPTIONS:"-KNS"} pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} +pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} From f627292c84b1b57e6179fc7827b57cad7709288f Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 14:10:13 -0500 Subject: [PATCH 011/150] created a separate executor this way there is no overlap and it doesnt linger in memory, everytime we call it a new exector gets created and the jvm should destroy it quickly as its method scoped --- .../odstransferservice/service/PmeterParser.java | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 1e247cfa..7dfcdcd7 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -27,7 +27,7 @@ public class PmeterParser { private final String MEASURE = "measure"; private final ByteArrayOutputStream outputStream; private final PumpStreamHandler streamHandler; - private final DefaultExecutor executor; + private final DefaultExecutor pmeterExecutor; private final ExecuteWatchdog watchDog; Logger logger = LoggerFactory.getLogger(PmeterParser.class); @@ -64,11 +64,10 @@ public PmeterParser(ObjectMapper pmeterMapper) { this.outputStream = new ByteArrayOutputStream(); this.streamHandler = new PumpStreamHandler(outputStream); - this.executor = new DefaultExecutor(); - + this.pmeterExecutor = new DefaultExecutor(); this.watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); - executor.setWatchdog(watchDog); - executor.setStreamHandler(streamHandler); + pmeterExecutor.setWatchdog(watchDog); + 
pmeterExecutor.setStreamHandler(streamHandler); this.pmeterMapper = pmeterMapper; } @@ -76,7 +75,7 @@ public PmeterParser(ObjectMapper pmeterMapper) { public void runPmeter() { try { - executor.execute(cmdLine); + pmeterExecutor.execute(cmdLine); } catch (IOException e) { logger.error("Failed in executing pmeter script:\n " + cmdLine); e.printStackTrace(); @@ -100,7 +99,8 @@ public CarbonScore runCarbonPmeter(String ip){ //pmeter carbon 129.114.108.45 CommandLine carbonCmd= CommandLine.parse(String.format("pmeter carbon %s", ip)); try { - executor.execute(carbonCmd); + DefaultExecutor carbonExecutor = new DefaultExecutor(); + carbonExecutor.execute(carbonCmd); } catch (IOException e) { return new CarbonScore(); } From 1a94e4bee71df492d1008283f20fd6446462d380 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 14:11:00 -0500 Subject: [PATCH 012/150] visual changes --- .../odstransferservice/service/PmeterParser.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 7dfcdcd7..33507907 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -95,9 +95,9 @@ public List parsePmeterOutput() throws IOException { return ret; } - public CarbonScore runCarbonPmeter(String ip){ + public CarbonScore runCarbonPmeter(String ip) { //pmeter carbon 129.114.108.45 - CommandLine carbonCmd= CommandLine.parse(String.format("pmeter carbon %s", ip)); + CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s", ip)); try { DefaultExecutor carbonExecutor = new DefaultExecutor(); carbonExecutor.execute(carbonCmd); @@ -108,7 +108,7 @@ public CarbonScore runCarbonPmeter(String ip){ Path filePath = Paths.get(this.pmeterCarbonPath); List lines = Files.readAllLines(filePath); CarbonScore score = new CarbonScore(); - for(String line: lines){ + for (String line : lines) { score = this.pmeterMapper.readValue(line, CarbonScore.class); break; } From af4268f8423a586431f77d49a6a141d1a3cf5531 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 14:11:53 -0500 Subject: [PATCH 013/150] updated pmeter version --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a8f0990b..6e48a0e7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ RUN mvn -f /home/app/pom.xml clean package -DskipTests FROM amazoncorretto:21-alpine-jdk RUN apk --no-cache add python3-dev py3-pip build-base gcc linux-headers -RUN pip3 install pmeter-ods==1.0.8 +RUN pip3 install pmeter-ods==1.0.11 COPY --from=build /home/app/target/ods-transfer-service-0.0.1-SNAPSHOT.jar /usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar From 69778bdc3a21a7fbb7f9f2e9b4406e92772461e7 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 17:11:35 -0500 Subject: [PATCH 014/150] latest alpine isnt working with py3-pi for some reason --- Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6e48a0e7..56022dc8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,14 +1,16 @@ -FROM maven:3.9.5-amazoncorretto-21 AS build +FROM maven:3.9.6-amazoncorretto-21 AS build COPY src /home/app/src COPY pom.xml /home/app RUN mvn -f /home/app/pom.xml clean 
package -DskipTests # Final Image -FROM amazoncorretto:21-alpine-jdk +FROM amazoncorretto:21-alpine3.18-jdk +RUN apk update RUN apk --no-cache add python3-dev py3-pip build-base gcc linux-headers -RUN pip3 install pmeter-ods==1.0.11 + +RUN pip install pmeter-ods==1.0.11 COPY --from=build /home/app/target/ods-transfer-service-0.0.1-SNAPSHOT.jar /usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar @@ -24,8 +26,6 @@ ENV ODS_GDRIVE_PROJECT_ID="onedatashare-dev" ENV EUREKA_URI="${EUREKA_URI}" ENV EUREKA_PASS="${EUREKA_PASS}" ENV EUREKA_USER="${EUREKA_USER}" -ENV FOLDER_WITH_CERTS="${FOLDER_WITH_CERTS}" -COPY ${FOLDER_WITH_CERTS} /certs/ ENV COCKROACH_URI="${COCKROACH_URI}" ENV COCKROACH_USER="${COCKROACH_USER}" ENV COCKROACH_PASS="${COCKROACH_PASS}" From 05fec5613617f80a52af1719ee94d5bf14582019 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 19 Jan 2024 17:25:41 -0500 Subject: [PATCH 015/150] Aright making carbon reporting into a toggle where if its off the score is just 0 --- .../odstransferservice/service/PmeterParser.java | 5 +++++ src/main/resources/application.properties | 1 + 2 files changed, 6 insertions(+) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 33507907..a9c581a2 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -49,6 +49,10 @@ public class PmeterParser { @Value("${pmeter.options}") String pmeterOptions; + + @Value("${pmeter.carbon.toggle}") + private boolean toggle; + ObjectMapper pmeterMapper; private CommandLine cmdLine; @@ -97,6 +101,7 @@ public List parsePmeterOutput() throws IOException { public CarbonScore runCarbonPmeter(String ip) { //pmeter carbon 129.114.108.45 + if(this.toggle == false){return new CarbonScore();} CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s", ip)); try { DefaultExecutor carbonExecutor = new DefaultExecutor(); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 2ca52395..5267035a 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -44,6 +44,7 @@ pmeter.measure=${PMETER_MEASURE:1} pmeter.options=${PMETER_CLI_OPTIONS:"-KNS"} pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} +pmeter.carbon.toggle=${REPORT_CARBON:false} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} From 23d7d458761b2c943d59d5092b695784616fd68c Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 20 Jan 2024 13:38:29 -0500 Subject: [PATCH 016/150] Little mistake with reporting parallelism --- .../odstransferservice/pools/ThreadPoolManagerPlatform.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java index 68eeb83d..f7b84f3e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -79,8 +79,9 @@ public int parallelismCount() { for 
(String key : this.platformThreadMap.keySet()) { if (key.contains(PARALLEL_POOL_PREFIX)) { ThreadPoolTaskExecutor threadPoolManager = this.platformThreadMap.get(PARALLEL_POOL_PREFIX); - if (threadPoolManager != null) { - return threadPoolManager.getCorePoolSize(); + int parallelismCount = threadPoolManager.getCorePoolSize(); + if(parallelismCount != 0){ + return parallelismCount; } } } From f710805b24f5e3f8aa8879da76cf42c0db22102b Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 20 Jan 2024 13:43:20 -0500 Subject: [PATCH 017/150] Whoops wrong key being used in the map --- .../pools/ThreadPoolManagerPlatform.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java index f7b84f3e..086ec0a8 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -78,10 +78,12 @@ public int concurrencyCount() { public int parallelismCount() { for (String key : this.platformThreadMap.keySet()) { if (key.contains(PARALLEL_POOL_PREFIX)) { - ThreadPoolTaskExecutor threadPoolManager = this.platformThreadMap.get(PARALLEL_POOL_PREFIX); - int parallelismCount = threadPoolManager.getCorePoolSize(); - if(parallelismCount != 0){ - return parallelismCount; + ThreadPoolTaskExecutor threadPoolManager = this.platformThreadMap.get(key); + if(threadPoolManager != null){ + int parallelismCount = threadPoolManager.getCorePoolSize(); + if(parallelismCount != 0){ + return parallelismCount; + } } } } From 1ef8a7ec013e4b2cbc77d00cfc844795be1e0547 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 20 Jan 2024 13:53:18 -0500 Subject: [PATCH 018/150] Aright this should have fixed the other issue with the key being wrong --- .../odstransferservice/pools/ThreadPoolManagerVirtual.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java index 7392ae60..5780694a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java @@ -103,9 +103,9 @@ public int concurrencyCount() { public int parallelismCount() { for (String key : this.executorHashmap.keySet()) { if (key.contains(PARALLEL_POOL_PREFIX)) { - SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(PARALLEL_POOL_PREFIX); - if (threadPoolManager != null) { - return threadPoolManager.getConcurrencyLimit(); + SimpleAsyncTaskExecutor executor = this.executorHashmap.get(key); + if (executor != null) { + return executor.getConcurrencyLimit(); } } } From 28da793f8cf42522ebcde4b0888689c86c8c6445 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 20 Jan 2024 17:29:32 -0500 Subject: [PATCH 019/150] Alright pushing the status to influx so the optimizer doesnt need to query cockroach --- .../odstransferservice/constant/DataInfluxConstants.java | 1 + .../odstransferservice/model/metrics/DataInflux.java | 3 +++ .../odstransferservice/service/cron/MetricsCollector.java | 3 +++ 
.../odstransferservice/service/step/vfs/VfsWriter.java | 3 +-- 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java index bee32170..03d38624 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java @@ -2,6 +2,7 @@ public class DataInfluxConstants { public static final String NETWORK_INTERFACE = "interface"; + public static final String IS_RUNNING = "isRunning"; public static final String ODS_USER = "ods_user"; public static final String TRANSFER_NODE_NAME = "transfer_node_name"; public static final String ACTIVE_CORE_COUNT = "active_core_count"; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java index 5eada8ed..2afb0836 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java @@ -159,4 +159,7 @@ public class DataInflux { @Column(name = JOB_UUID, tag = true) private UUID jobUuid; + + @Column(name = IS_RUNNING) + private Boolean isRunning; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java index 2f68c69c..60971bae 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java @@ -134,6 +134,9 @@ public void collectAndSave() { lastPmeterData.setJobSize(jobParameters.getLong(ODSConstants.JOB_SIZE)); lastPmeterData.setAvgFileSize(jobParameters.getLong(ODSConstants.FILE_SIZE_AVG)); lastPmeterData.setOdsUser(jobParameters.getString(ODSConstants.OWNER_ID)); + lastPmeterData.setIsRunning(currentAggregateMetric.getStepExecution().getJobExecution().isRunning()); + }else{ + lastPmeterData.setIsRunning(false); } log.info(lastPmeterData.toString()); this.influxCache.clearCache(); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java index bf9e76bd..5441d40b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java @@ -70,8 +70,7 @@ public void write(Chunk chunks) throws Exception { for (int i = 0; i < items.size(); i++) { DataChunk chunk = items.get(i); int bytesWritten = this.fileChannel.write(ByteBuffer.wrap(chunk.getData()), chunk.getStartPosition()); - if (chunk.getSize() != bytesWritten) - chunk = null; + chunk = null; } } } From 1c7dd14fa557c44bac1007f5b8125e352df0438b Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 22 Jan 2024 14:59:32 -0500 Subject: [PATCH 020/150] wrong key here accidentally --- .../odstransferservice/pools/ThreadPoolManagerVirtual.java | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java index 5780694a..ac7c6094 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java @@ -52,7 +52,7 @@ public void applyOptimizer(int concurrency, int parallel) { } for (String key : this.executorHashmap.keySet()) { if (key.contains(PARALLEL_POOL_PREFIX)) { - SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(PARALLEL_POOL_PREFIX); + SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(key); if (parallelPool != null) { if (parallel > 0 && parallel != parallelPool.getConcurrencyLimit()) { parallelPool.setConcurrencyLimit(parallel); From dd0415116ee6351a51c383537e05f4648e9f878b Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 6 Feb 2024 14:45:38 -0500 Subject: [PATCH 021/150] patching to do one chunk download if the file size == chunkSize --- .../service/step/AmazonS3/S3Expander.java | 2 ++ .../odstransferservice/service/step/FileExpander.java | 4 ++++ .../odstransferservice/service/step/box/BoxReader.java | 6 +++++- 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java new file mode 100644 index 00000000..56a05110 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java @@ -0,0 +1,2 @@ +package org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3;public class S3Expander { +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java new file mode 100644 index 00000000..b3df5197 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java @@ -0,0 +1,4 @@ +package org.onedatashare.transferservice.odstransferservice.service.step; + +public class ExpanderContract { +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java index ae58c6dc..eae7f925 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java @@ -45,7 +45,11 @@ protected DataChunk doRead() { FilePart filePart = filePartitioner.nextPart(); if (filePart == null) return null; ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); - this.currentFile.downloadRange(byteArray, filePart.getStart(), filePart.getEnd()); + if(this.fileInfo.getSize() == this.fileInfo.getChunkSize()){ + this.currentFile.download(byteArray); + }else{ + 
this.currentFile.downloadRange(byteArray, filePart.getStart(), filePart.getEnd()); + } DataChunk chunk = ODSUtility.makeChunk(filePart.getSize(), byteArray.toByteArray(), filePart.getStart(), Math.toIntExact(filePart.getPartIdx()), currentFile.getInfo().getName()); logger.info(chunk.toString()); return chunk; From d383072744ab50959f604664b9ff3f6f1f23c41f Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 6 Feb 2024 14:46:18 -0500 Subject: [PATCH 022/150] not sure why those two got included --- .../odstransferservice/service/step/AmazonS3/S3Expander.java | 2 -- .../odstransferservice/service/step/FileExpander.java | 4 ---- 2 files changed, 6 deletions(-) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java deleted file mode 100644 index 56a05110..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/S3Expander.java +++ /dev/null @@ -1,2 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3;public class S3Expander { -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java deleted file mode 100644 index b3df5197..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/FileExpander.java +++ /dev/null @@ -1,4 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step; - -public class ExpanderContract { -} From d38d66b9205ae49922494a325b30a798aa6bdac8 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 7 Feb 2024 20:51:36 -0500 Subject: [PATCH 023/150] Few things: created a factory for readers, writers, and 2 expanders. Refactored bucket creation, also now we have default values for all measurement points going into influx. 
This was needed for the Gym env and optimizers --- Dockerfile | 2 +- pom.xml | 5 + .../config/BatchConfig.java | 20 ++- .../config/MetricsConfig.java | 10 -- .../consumer/RabbitMQConsumer.java | 10 +- .../controller/TransferController.java | 23 +-- .../model/BoxSmallFileUpload.java | 18 ++- .../model/metrics/DataInflux.java | 90 +++++------ .../DatabaseService/InfluxIOService.java | 15 +- .../service/JobControl.java | 152 +++--------------- .../service/{cron => }/MetricsCollector.java | 7 +- .../service/expanders/ExpanderFactory.java | 28 ++++ .../service/expanders/FileExpander.java | 15 ++ .../service/expanders/HttpExpander.java | 126 +++++++++++++++ .../service/{ => expanders}/VfsExpander.java | 23 +-- .../listner/JobCompletionListener.java | 2 +- .../AmazonS3/AmazonS3LargeFileWriter.java | 2 +- .../AmazonS3/AmazonS3SmallFileWriter.java | 2 +- .../service/step/ODSBaseWriter.java | 4 +- .../service/step/ReaderWriterFactory.java | 140 ++++++++++++++++ .../service/step/box/BoxReader.java | 12 +- .../service/step/box/BoxWriterLargeFile.java | 17 +- .../service/step/box/BoxWriterSmallFile.java | 10 +- .../step/dropbox/DropBoxChunkedWriter.java | 2 +- .../service/step/ftp/FTPWriter.java | 2 +- .../service/step/http/HttpReader.java | 28 ++-- .../service/step/scp/SCPWriter.java | 2 +- .../service/step/sftp/SFTPWriter.java | 2 +- .../service/step/vfs/VfsWriter.java | 2 +- 29 files changed, 468 insertions(+), 303 deletions(-) rename src/main/java/org/onedatashare/transferservice/odstransferservice/service/{cron => }/MetricsCollector.java (95%) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java rename src/main/java/org/onedatashare/transferservice/odstransferservice/service/{ => expanders}/VfsExpander.java (69%) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java diff --git a/Dockerfile b/Dockerfile index 56022dc8..73e98aa8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,7 +10,7 @@ FROM amazoncorretto:21-alpine3.18-jdk RUN apk update RUN apk --no-cache add python3-dev py3-pip build-base gcc linux-headers -RUN pip install pmeter-ods==1.0.11 +RUN pip install pmeter-ods COPY --from=build /home/app/target/ods-transfer-service-0.0.1-SNAPSHOT.jar /usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar diff --git a/pom.xml b/pom.xml index a5dd7ab6..5b795391 100644 --- a/pom.xml +++ b/pom.xml @@ -130,6 +130,11 @@ org.springframework.boot spring-boot-starter-actuator + + org.jsoup + jsoup + 1.17.2 + org.springframework.boot spring-boot-starter-web diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 826e235c..b27cd8ed 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -7,22 +7,18 @@ import org.springframework.context.annotation.Configuration; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.retry.backoff.BackOffPolicy; 
+import org.springframework.retry.backoff.ExponentialBackOffPolicy; import org.springframework.transaction.PlatformTransactionManager; import javax.sql.DataSource; import java.util.HashSet; import java.util.Set; +import java.util.concurrent.TimeUnit; @Configuration public class BatchConfig { -// @Bean -// public JobLauncher jobLauncher(JobRepository jobRepository) { -// TaskExecutorJobLauncher taskExecutorJobLauncher = new TaskExecutorJobLauncher(); -// taskExecutorJobLauncher.setJobRepository(jobRepository); -// return taskExecutorJobLauncher; -// } - @Bean public Set jobIds() { return new HashSet<>(); @@ -40,5 +36,15 @@ public JobLauncher asyncJobLauncher(JobRepository jobRepository) { jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); return jobLauncher; } + + + @Bean + public BackOffPolicy backOffPolicy() { + ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy(); + backOffPolicy.setInitialInterval(TimeUnit.SECONDS.toMillis(5)); + backOffPolicy.setMultiplier(2.0); + backOffPolicy.setMaxInterval(TimeUnit.DAYS.toMillis(1)); + return backOffPolicy; + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java index e952bdcd..ebd324b5 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java @@ -6,8 +6,6 @@ import lombok.Data; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.retry.backoff.FixedBackOffPolicy; -import org.springframework.retry.support.RetryTemplate; @Configuration @Data @@ -20,12 +18,4 @@ public ObjectMapper pmeterMapper() { return objectMapper; } - @Bean - public RetryTemplate retryTemplateForReaderAndWriter() { - RetryTemplate retryTemplate = new RetryTemplate(); - FixedBackOffPolicy fixedBackOffPolicy = new FixedBackOffPolicy(); - fixedBackOffPolicy.setBackOffPeriod(2000l); - retryTemplate.setBackOffPolicy(fixedBackOffPolicy); - return retryTemplate; - } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 533a3d65..e70182a7 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -11,7 +11,7 @@ import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.VfsExpander; +import org.onedatashare.transferservice.odstransferservice.service.expanders.VfsExpander; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.core.Message; @@ -40,10 +40,8 @@ public class RabbitMQConsumer { Queue userQueue; - VfsExpander vfsExpander; - public RabbitMQConsumer(VfsExpander vfsExpander, Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool) { - this.vfsExpander = vfsExpander; + public RabbitMQConsumer(Queue userQueue, 
JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool) { this.userQueue = userQueue; this.jobParamService = jobParamService; this.jobLauncher = asyncJobLauncher; @@ -63,10 +61,6 @@ public void consumeDefaultMessage(final Message message) { TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); logger.info("Job Recieved: {}", request.toString()); - if (request.getSource().getType().equals(EndpointType.vfs)) { - List fileExpandedList = vfsExpander.expandDirectory(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); - request.getSource().setInfoList(new ArrayList<>(fileExpandedList)); - } JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); jc.setRequest(request); jobLauncher.run(jc.concurrentJobDefinition(), parameters); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java index fcb2bce3..d3ca4fd9 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java @@ -1,11 +1,8 @@ package org.onedatashare.transferservice.odstransferservice.controller; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.VfsExpander; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.Job; @@ -13,7 +10,6 @@ import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.scheduling.annotation.Async; @@ -22,10 +18,6 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - /** * Transfer controller with to initiate transfer request @@ -36,26 +28,23 @@ public class TransferController { Logger logger = LoggerFactory.getLogger(TransferController.class); - @Autowired JobControl jc; - @Autowired JobLauncher asyncJobLauncher; - @Autowired JobParamService jobParamService; - @Autowired - VfsExpander vfsExpander; + public TransferController(JobControl jobControl, JobLauncher asyncJobLauncher, JobParamService jobParamService) { + this.jc = jobControl; + this.asyncJobLauncher = asyncJobLauncher; + this.jobParamService = jobParamService; + + } @RequestMapping(value = "/start", method = RequestMethod.POST) @Async public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { logger.info("Controller Entry point"); - if (request.getSource().getType().equals(EndpointType.vfs)) { - List fileExpandedList = 
vfsExpander.expandDirectory(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); - request.getSource().setInfoList(new ArrayList<>(fileExpandedList)); - } JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); jc.setRequest(request); Job job = jc.concurrentJobDefinition(); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java index ad7f3764..0d328ced 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java @@ -25,13 +25,17 @@ public void addAllChunks(List chunks){ this.dataChunkPriorityQueue.addAll(chunks); } - public InputStream condenseListToOneStream(long size){ - byte[] data = new byte[Long.valueOf(size).intValue()];//we know this file will always be <= 20MB - ByteBuffer buffer = ByteBuffer.wrap(data); - for(DataChunk chunk : this.dataChunkPriorityQueue){ - buffer.put(chunk.getData()); + public InputStream condenseListToOneStream(){ + int totalLength = this.dataChunkPriorityQueue.stream().mapToInt(byteArray -> byteArray.getData().length).sum(); + byte[] combinedBytes = new byte[totalLength]; + + int currentIndex = 0; + for (DataChunk chunk : dataChunkPriorityQueue) { + byte[] byteArray = chunk.getData(); + System.arraycopy(byteArray, 0, combinedBytes, currentIndex, byteArray.length); + currentIndex += byteArray.length; } - this.dataChunkPriorityQueue.clear(); - return new ByteArrayInputStream(buffer.array()); + + return new ByteArrayInputStream(combinedBytes); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java index 2afb0836..3cb4af20 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java @@ -19,147 +19,147 @@ public class DataInflux { @JsonProperty(value = NETWORK_INTERFACE) @Column(name = NETWORK_INTERFACE) - private String networkInterface; + private String networkInterface = ""; @JsonProperty(value = ODS_USER) @Column(name = ODS_USER, tag = true) - private String odsUser; + private String odsUser = ""; @JsonProperty(value = TRANSFER_NODE_NAME) @Column(name = TRANSFER_NODE_NAME, tag = true) - private String transferNodeName; + private String transferNodeName = ""; @JsonProperty(value = ACTIVE_CORE_COUNT) @Column(name = ACTIVE_CORE_COUNT) - private Integer coreCount; + private Integer coreCount = Runtime.getRuntime().availableProcessors(); @JsonProperty(value = CPU_FREQUENCY_MAX) @Column(name = CPU_FREQUENCY_MAX) - private Double cpu_frequency_max; + private Double cpu_frequency_max = 0.0; @JsonProperty(value = CPU_FREQUENCY_CURRENT) @Column(name = CPU_FREQUENCY_CURRENT) - private Double cpu_frequency_current; + private Double cpu_frequency_current = 0.0; @JsonProperty(value = CPU_FREQUENCY_MIN) @Column(name = CPU_FREQUENCY_MIN) - private Double cpu_frequency_min; + private Double cpu_frequency_min = 0.0; @JsonProperty(value = CPU_ARCHITECTURE) @Column(name = CPU_ARCHITECTURE) - private String cpuArchitecture; + private String cpuArchitecture = ""; @JsonProperty(value = PACKET_LOSS_RATE) @Column(name = 
PACKET_LOSS_RATE) - private Double packetLossRate; + private Double packetLossRate = 0.0; //NIC values @JsonProperty(value = BYTES_SENT) @Column(name = BYTES_SENT) - private Long bytesSent; + private Long bytesSent = 0L; @JsonProperty(value = BYTES_RECEIVED) @Column(name = BYTES_RECEIVED) - private Long bytesReceived; + private Long bytesReceived = 0L; @JsonProperty(value = PACKETS_SENT) @Column(name = PACKETS_SENT) - private Long packetSent; + private Long packetSent = 0L; @JsonProperty(value = PACKETS_RECEIVED) @Column(name = PACKETS_RECEIVED) - private Long packetReceived; + private Long packetReceived = 0L; @JsonProperty(value = DROP_IN) @Column(name = DROP_IN) - private Long dropin; + private Long dropin = 0L; @JsonProperty(value = DROP_OUT) @Column(name = DROP_OUT) - private Long dropout; + private Long dropout = 0L; @JsonProperty(value = NIC_MTU) @Column(name = NIC_MTU) - private Integer nicMtu; + private Integer nicMtu = 0; @JsonProperty(value = NIC_SPEED) @Column(name = NIC_SPEED) - private Integer nicSpeed; + private Integer nicSpeed = 0; @JsonProperty(value = LATENCY) @Column(name = LATENCY) - private Double latency; + private Double latency = 0.0; @JsonProperty(value = RTT) @Column(name = RTT) - private Double rtt; + private Double rtt = 0.0; @Column(name = SOURCE_RTT) - private Double sourceRtt; + private Double sourceRtt = 0.0; @Column(name = SOURCE_LATENCY) - private Double sourceLatency; + private Double sourceLatency = 0.0; @Column(name = DESTINATION_RTT) - private Double destinationRtt; + private Double destinationRtt = 0.0; @Column(name = DEST_LATENCY) - private Double destLatency; + private Double destLatency = 0.0; @JsonProperty(value = ERROR_IN) @Column(name = ERROR_IN) - private Long errin; + private Long errin = 0L; @JsonProperty(value = ERROR_OUT) @Column(name = ERROR_OUT) - private Long errout; + private Long errout = 0L; //Job Values @Column(name = JOB_ID, tag = true) - private Long jobId; + private Long jobId = 0L; @Column(name = READ_THROUGHPUT) - private Double readThroughput; + private Double readThroughput = 0.0; @Column(name = WRITE_THROUGHPUT) - private Double writeThroughput; + private Double writeThroughput = 0.0; @Column(name = BYTES_UPLOADED) - private Long bytesWritten; + private Long bytesWritten = 0L; @Column(name = BYTES_DOWNLOADED) - private Long bytesRead; + private Long bytesRead = 0L; @Column(name = CONCURRENCY) - private Integer concurrency; + private Integer concurrency = 0; @Column(name = PARALLELISM) - private Integer parallelism; + private Integer parallelism = 0; @Column(name = PIPELINING) - private Integer pipelining; + private Integer pipelining = 0; @Column(name = MEMORY) - private Long memory; + private Long memory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); @Column(name = MAX_MEMORY) - private Long maxMemory; + private Long maxMemory = Runtime.getRuntime().maxMemory(); @Column(name = FREE_MEMORY) - private Long freeMemory; + private Long freeMemory = Runtime.getRuntime().freeMemory(); @Column(name = ALLOCATED_MEMORY) - private Long allocatedMemory; + private Long allocatedMemory = Runtime.getRuntime().totalMemory(); @Column(name = JOB_SIZE) - private Long jobSize; + private Long jobSize = 0L; @Column(name = AVERAGE_FILE_SIZE) - private Long avgFileSize; + private Long avgFileSize = 0L; @Column(name = SOURCE_TYPE, tag = true) - private String sourceType; + private String sourceType = ""; @Column(name = SOURCE_CRED_ID, tag = true) - private String sourceCredId; + private String sourceCredId = ""; 
@Column(name = DESTINATION_TYPE, tag = true) - private String destType; + private String destType = ""; @Column(name = DESTINATION_CRED_IT, tag = true) - private String destCredId; + private String destCredId = ""; @Column(name = CHUNK_SIZE) - private Long chunksize; + private Long chunksize = 0L; @Column(name = JOB_UUID, tag = true) private UUID jobUuid; @Column(name = IS_RUNNING) - private Boolean isRunning; + private Boolean isRunning = false; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java index 2f39e73e..62760fd7 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java @@ -6,15 +6,14 @@ import com.influxdb.client.domain.WritePrecision; import com.influxdb.exceptions.InfluxException; import com.influxdb.exceptions.UnprocessableEntityException; +import jakarta.annotation.PostConstruct; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Component; import org.springframework.stereotype.Service; -@Component +@Service public class InfluxIOService { private final InfluxDBClient influxClient; @@ -26,6 +25,7 @@ public class InfluxIOService { @Value("${ods.influx.org}") String org; + Bucket bucket; private WriteApi writeApi; public InfluxIOService(InfluxDBClient influxClient) { @@ -33,9 +33,13 @@ public InfluxIOService(InfluxDBClient influxClient) { this.writeApi = this.influxClient.makeWriteApi(); } + @PostConstruct + public void postConstruct() { + this.reconfigureBucketForNewJob(this.bucketName); + } + public void reconfigureBucketForNewJob(String ownerId) { logger.info("********* Reconfiguring the Bucket ***********"); - Bucket bucket; if (ownerId == null) { bucket = influxClient.getBucketsApi().findBucketByName(this.bucketName); } else { @@ -49,13 +53,12 @@ public void reconfigureBucketForNewJob(String ownerId) { } catch (UnprocessableEntityException ignored) { } } - this.writeApi = this.influxClient.makeWriteApi(); } public void insertDataPoint(DataInflux point) { try { - writeApi.writeMeasurement(WritePrecision.MS, point); + writeApi.writeMeasurement(this.bucket.getName(), this.org, WritePrecision.MS, point); } catch (InfluxException exception) { logger.error("Exception occurred while pushing measurement to influx: " + exception.getMessage()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index 5135ecf6..ae91699a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -3,34 +3,14 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import 
org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.expanders.ExpanderFactory; import org.onedatashare.transferservice.odstransferservice.service.listner.JobCompletionListener; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3LargeFileWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3Reader; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3SmallFileWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxReader; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterLargeFile; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterSmallFile; -import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxChunkedWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxReader; -import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPReader; -import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveReader; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveResumableWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveSimpleWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.http.HttpReader; -import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPReader; -import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPReader; -import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsReader; -import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.ReaderWriterFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.Job; @@ -40,18 +20,14 @@ import org.springframework.batch.core.repository.JobRepository; import org.springframework.batch.core.step.builder.SimpleStepBuilder; import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.retry.backoff.BackOffPolicy; import org.springframework.stereotype.Service; import org.springframework.transaction.PlatformTransactionManager; import java.util.List; import java.util.stream.Collectors; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; - @Service 
@NoArgsConstructor @@ -64,7 +40,10 @@ public class JobControl { Logger logger = LoggerFactory.getLogger(JobControl.class); @Autowired - VfsExpander vfsExpander; + ExpanderFactory expanderFactory; + + @Autowired + ReaderWriterFactory readerWriterFactory; @Autowired JobRepository jobRepository; @@ -75,12 +54,6 @@ public class JobControl { @Autowired JobCompletionListener jobCompletionListener; - @Autowired - MetricsCollector metricsCollector; - - @Autowired - InfluxCache influxCache; - @Autowired PlatformTransactionManager platformTransactionManager; @@ -90,12 +63,12 @@ public class JobControl { @Autowired ThreadPoolContract threadPool; - private List createConcurrentFlow(List infoList, String basePath) { - if (this.request.getSource().getType().equals(EndpointType.vfs)) { - infoList = vfsExpander.expandDirectory(infoList, basePath); - logger.info("File list: {}", infoList); - } - return infoList.stream().map(file -> { + @Autowired + BackOffPolicy backOffPolicy; + + private List createConcurrentFlow(String basePath) { + List fileInfo = expanderFactory.getExpander(this.request.getSource()); + return fileInfo.stream().map(file -> { String idForStep = ""; if (!file.getId().isEmpty()) { idForStep = file.getId(); @@ -105,115 +78,26 @@ private List createConcurrentFlow(List infoList, String basePa SimpleStepBuilder stepBuilder = new StepBuilder(idForStep, this.jobRepository) .chunk(this.request.getOptions().getPipeSize(), this.platformTransactionManager); stepBuilder - .reader(getRightReader(request.getSource().getType(), file)) - .writer(getRightWriter(request.getDestination().getType(), file)); + .reader(readerWriterFactory.getRightReader(this.request.getSource(), file, this.request.getOptions())) + .writer(readerWriterFactory.getRightWriter(request.getDestination(), file)); if (this.request.getOptions().getParallelThreadCount() > 0) { stepBuilder.taskExecutor(threadPool.parallelPool(request.getOptions().getParallelThreadCount(), file.getPath())); } stepBuilder.throttleLimit(64); + stepBuilder.faultTolerant() + .backOffPolicy(this.backOffPolicy); return new FlowBuilder(basePath + idForStep) .start(stepBuilder.build()).build(); }).collect(Collectors.toList()); } - protected ItemReader getRightReader(EndpointType type, EntityInfo fileInfo) { - switch (type) { - case http: - HttpReader hr = new HttpReader(fileInfo, request.getSource().getVfsSourceCredential()); - hr.setPool(connectionBag.getHttpReaderPool()); - return hr; - case vfs: - VfsReader vfsReader = new VfsReader(request.getSource().getVfsSourceCredential(), fileInfo); - return vfsReader; - case sftp: - SFTPReader sftpReader = new SFTPReader(request.getSource().getVfsSourceCredential(), fileInfo, request.getOptions().getPipeSize()); - sftpReader.setPool(connectionBag.getSftpReaderPool()); - return sftpReader; - case ftp: - FTPReader ftpReader = new FTPReader(request.getSource().getVfsSourceCredential(), fileInfo); - ftpReader.setPool(connectionBag.getFtpReaderPool()); - return ftpReader; - case s3: - AmazonS3Reader amazonS3Reader = new AmazonS3Reader(request.getSource().getVfsSourceCredential(), fileInfo); - amazonS3Reader.setPool(connectionBag.getS3ReaderPool()); - return amazonS3Reader; - case box: - BoxReader boxReader = new BoxReader(request.getSource().getOauthSourceCredential(), fileInfo); - boxReader.setMaxRetry(this.request.getOptions().getRetry()); - return boxReader; - case dropbox: - DropBoxReader dropBoxReader = new DropBoxReader(request.getSource().getOauthSourceCredential(), fileInfo); - return dropBoxReader; - case scp: - 
SCPReader reader = new SCPReader(fileInfo); - reader.setPool(connectionBag.getSftpReaderPool()); - return reader; - case gdrive: - GDriveReader dDriveReader = new GDriveReader(request.getSource().getOauthSourceCredential(), fileInfo); - return dDriveReader; - } - return null; - } - - protected ItemWriter getRightWriter(EndpointType type, EntityInfo fileInfo) { - switch (type) { - case vfs: - VfsWriter vfsWriter = new VfsWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return vfsWriter; - case sftp: - SFTPWriter sftpWriter = new SFTPWriter(request.getDestination().getVfsDestCredential(), this.metricsCollector, this.influxCache); - sftpWriter.setPool(connectionBag.getSftpWriterPool()); - return sftpWriter; - case ftp: - FTPWriter ftpWriter = new FTPWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - ftpWriter.setPool(connectionBag.getFtpWriterPool()); - return ftpWriter; - case s3: - if (fileInfo.getSize() < TWENTY_MB) { - AmazonS3SmallFileWriter amazonS3SmallFileWriter = new AmazonS3SmallFileWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - amazonS3SmallFileWriter.setPool(connectionBag.getS3WriterPool()); - return amazonS3SmallFileWriter; - } else { - AmazonS3LargeFileWriter amazonS3LargeFileWriter = new AmazonS3LargeFileWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - amazonS3LargeFileWriter.setPool(connectionBag.getS3WriterPool()); - return amazonS3LargeFileWriter; - } - case box: - if (fileInfo.getSize() < TWENTY_MB) { - BoxWriterSmallFile boxWriterSmallFile = new BoxWriterSmallFile(request.getDestination().getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return boxWriterSmallFile; - } else { - BoxWriterLargeFile boxWriterLargeFile = new BoxWriterLargeFile(request.getDestination().getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return boxWriterLargeFile; - } - case dropbox: - DropBoxChunkedWriter dropBoxChunkedWriter = new DropBoxChunkedWriter(request.getDestination().getOauthDestCredential(), this.metricsCollector, this.influxCache); - return dropBoxChunkedWriter; - case scp: - SCPWriter scpWriter = new SCPWriter(fileInfo, this.metricsCollector, this.influxCache); - scpWriter.setPool(connectionBag.getSftpWriterPool()); - return scpWriter; - case gdrive: - if (fileInfo.getSize() < FIVE_MB) { - GDriveSimpleWriter writer = new GDriveSimpleWriter(request.getDestination().getOauthDestCredential(), fileInfo); - return writer; - } else { - GDriveResumableWriter writer = new GDriveResumableWriter(request.getDestination().getOauthDestCredential(), fileInfo); - writer.setPool(connectionBag.getGoogleDriveWriterPool()); - return writer; - } - } - return null; - } - public Job concurrentJobDefinition() { JobBuilder jobBuilder = new JobBuilder(this.request.getJobUuid().toString(), this.jobRepository); connectionBag.preparePools(this.request); - List flows = createConcurrentFlow(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); + List flows = createConcurrentFlow(request.getSource().getFileSourcePath()); this.influxIOService.reconfigureBucketForNewJob(this.request.getOwnerId()); Flow[] fl = new Flow[flows.size()]; Flow f = new FlowBuilder("splitFlow") -// .split(this.threadPoolManager.stepTaskExecutorVirtual(this.request.getOptions().getConcurrencyThreadCount())) 
.split(this.threadPool.stepPool(this.request.getOptions().getConcurrencyThreadCount())) .add(flows.toArray(fl)) .build(); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java similarity index 95% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java index 60971bae..f722b67a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.service.cron; +package org.onedatashare.transferservice.odstransferservice.service; import lombok.Getter; import lombok.Setter; @@ -7,9 +7,6 @@ import org.onedatashare.transferservice.odstransferservice.model.JobMetric; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.LatencyRtt; -import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; @@ -85,7 +82,7 @@ public void collectAndSave() { long maxMem = Runtime.getRuntime().maxMemory(); JobMetric currentAggregateMetric = influxCache.aggregateMetric(); //this metrics throughput is the throughput of the whole map in influxCache. 
DataInflux lastPmeterData; - if (this.metrics.size() < 1) { + if (this.metrics.isEmpty()) { this.metrics.add(new DataInflux()); lastPmeterData = metrics.get(metrics.size() - 1); } else { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java new file mode 100644 index 00000000..2098ef7c --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java @@ -0,0 +1,28 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.springframework.stereotype.Service; + +import java.util.List; + +@Service +public class ExpanderFactory { + + public List getExpander(TransferJobRequest.Source source){ + switch (source.getType()){ + case vfs -> { + VfsExpander vfsExpander = new VfsExpander(); + vfsExpander.createClient(source.getVfsSourceCredential()); + return vfsExpander.expandedFileSystem(source.getInfoList(), source.getFileSourcePath()); + } + case http -> { + HttpExpander httpExpander = new HttpExpander(); + httpExpander.createClient(source.getVfsSourceCredential()); + return httpExpander.expandedFileSystem(source.getInfoList(), source.getFileSourcePath()); + } + } + return source.getInfoList(); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java new file mode 100644 index 00000000..b5e45968 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java @@ -0,0 +1,15 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; + +import java.util.List; + +public interface FileExpander { + + public void createClient(EndpointCredential credential); + + public List expandedFileSystem(List userSelectedResources, String basePath); + + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java new file mode 100644 index 00000000..e3953d13 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java @@ -0,0 +1,126 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import lombok.SneakyThrows; +import org.jsoup.Jsoup; +import org.jsoup.nodes.Document; +import org.jsoup.nodes.Element; +import org.jsoup.select.Elements; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import java.io.IOException; 
+import java.net.URL; +import java.net.URLConnection; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; +import java.util.Stack; + +public class HttpExpander implements FileExpander{ + + private AccountEndpointCredential credential; + Logger logger = LoggerFactory.getLogger(HttpExpander.class); + + @Override + public void createClient(EndpointCredential credential) { + this.credential = (AccountEndpointCredential) credential; + logger.info(this.credential.toString()); + + } + + @SneakyThrows + @Override + public List expandedFileSystem(List userSelectedResources, String basePath) { + List filesToSend = new ArrayList<>(); + Stack directoriesToTraverse = new Stack<>(); + if (basePath.isEmpty()) basePath = "/"; + if (userSelectedResources.isEmpty()) { //we move the whole damn server + logger.info("User resources is empty gonna just send the whole server I guess"); + Document doc = Jsoup.connect(this.credential.getUri() + basePath).get(); + Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, 0)); + } + } + } else { //move only files/folders the user selected + for (EntityInfo selectedFiles : userSelectedResources) { + //we have a folder to transfer + if(selectedFiles.getPath().endsWith("/")){ + Document doc = Jsoup.connect(this.credential.getUri() + basePath + selectedFiles.getPath()) + .ignoreContentType(true) + .get(); + logger.info(doc.toString()); + Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, selectedFiles.getChunkSize())); + } + } + }else{ + filesToSend.add(this.fileToInfo(this.credential.getUri() + Paths.get(basePath, selectedFiles.getPath()).toString(), selectedFiles.getChunkSize())); + } + } + } + //all of these have names that should be appended to the path + while (!directoriesToTraverse.isEmpty()) { + Element directory = directoriesToTraverse.pop(); + if (directory.text().contains("..") || directory.text().contains(".")) { + continue; + } + logger.info(directory.baseUri() + directory.text()); + Document doc = Jsoup.connect(directory.baseUri() + basePath +directory.text()).get(); + Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, 0)); + } + } + } + return filesToSend; + } + + public EntityInfo fromElement(Element elem, Integer chunkSize) throws IOException { + EntityInfo fileInfo = new EntityInfo(); + if(chunkSize == 0){ + chunkSize = 16000000; + } + URL url = new URL(elem.absUrl("href")); + long fileSize = url.openConnection().getContentLengthLong(); + Path path = Path.of(url.getPath()); + logger.info("File path: {}, name: {}, size: {},", path, elem.text(), fileSize); + fileInfo.setId(elem.text()); + fileInfo.setSize(fileSize); + fileInfo.setPath(path.toAbsolutePath().toString()); + fileInfo.setChunkSize(chunkSize); + return fileInfo; + } + + public EntityInfo fileToInfo(String strUrl, Integer chunkSize) throws IOException { + if(chunkSize == 0){ + chunkSize = 16000000; + } + EntityInfo fileInfo = new EntityInfo(); + URL url = new URL(strUrl); + URLConnection conn = url.openConnection(); + long 
fileSize = conn.getContentLengthLong(); + String fileName = Paths.get(conn.getURL().getFile()).getFileName().toString(); + logger.info("File path: {}, name: {}, size: {},", url.getPath(), fileName, fileSize); + fileInfo.setId(fileName); + fileInfo.setSize(fileSize); + fileInfo.setPath(url.getPath()); + fileInfo.setChunkSize(chunkSize); + return fileInfo; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java similarity index 69% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java index ebe9eb6c..e0274542 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java @@ -1,6 +1,8 @@ -package org.onedatashare.transferservice.odstransferservice.service; +package org.onedatashare.transferservice.odstransferservice.service.expanders; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; +import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @@ -11,21 +13,22 @@ import java.util.List; import java.util.Stack; -@Service -public class VfsExpander { +public class VfsExpander implements FileExpander { Logger logger; public VfsExpander() { - this.logger = LoggerFactory.getLogger(FilePartitioner.class); + this.logger = LoggerFactory.getLogger(VfsExpander.class); } - public List expandDirectory(List userResources, String basePath) { + public void createClient(EndpointCredential credential) {} + + public List expandedFileSystem(List userSelectedResources, String basePath) { List endList = new ArrayList<>(); Stack traversalStack = new Stack<>(); //only directories on the stack. - logger.info("Expanding files VFS: {}", userResources); - if (userResources.size() == 0) return endList; //this case should never happen. - for (EntityInfo fileInfo : userResources) { + logger.info("Expanding files VFS: {}", userSelectedResources); + if (userSelectedResources.isEmpty()) return endList; //this case should never happen. 
+ for (EntityInfo fileInfo : userSelectedResources) { Path path = Path.of(fileInfo.getPath()); if (path.toFile().isDirectory()) { traversalStack.push(path.toFile()); @@ -39,9 +42,9 @@ public List expandDirectory(List userResources, String b File[] files = resource.listFiles(); if (files == null) continue; for (File file : files) { - if(file.isDirectory()){ + if (file.isDirectory()) { traversalStack.push(file); - }else{ + } else { endList.add(fileToEntity(file, 0)); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java index 5b6228b2..f6060826 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java @@ -6,7 +6,7 @@ import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java index 80403ba1..6a6178bc 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java @@ -9,7 +9,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java index 350c0053..f50130a6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java @@ -11,7 +11,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import 
org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java index be8ec4db..293771ce 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java @@ -2,9 +2,7 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.annotation.AfterRead; import org.springframework.batch.core.annotation.AfterWrite; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java new file mode 100644 index 00000000..2b32f7a1 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java @@ -0,0 +1,140 @@ +package org.onedatashare.transferservice.odstransferservice.service.step; + +import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.model.TransferOptions; +import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; +import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3LargeFileWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3Reader; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3SmallFileWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxReader; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterLargeFile; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterSmallFile; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxChunkedWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxReader; +import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveReader; +import 
org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveResumableWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveSimpleWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.http.HttpReader; +import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsReader; +import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsWriter; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.stereotype.Service; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; + +@Service +public class ReaderWriterFactory { + + private final ConnectionBag connectionBag; + private final InfluxCache influxCache; + private final MetricsCollector metricsCollector; + + public ReaderWriterFactory(ConnectionBag connectionBag, InfluxCache influxCache, MetricsCollector metricsCollector) { + this.connectionBag = connectionBag; + this.influxCache = influxCache; + this.metricsCollector = metricsCollector; + } + + public ItemReader getRightReader(TransferJobRequest.Source source, EntityInfo fileInfo, TransferOptions transferOptions) { + switch (source.getType()) { + case http: + HttpReader hr = new HttpReader(fileInfo, source.getVfsSourceCredential()); + hr.setPool(connectionBag.getHttpReaderPool()); + return hr; + case vfs: + VfsReader vfsReader = new VfsReader(source.getVfsSourceCredential(), fileInfo); + return vfsReader; + case sftp: + SFTPReader sftpReader = new SFTPReader(source.getVfsSourceCredential(), fileInfo, transferOptions.getPipeSize()); + sftpReader.setPool(connectionBag.getSftpReaderPool()); + return sftpReader; + case ftp: + FTPReader ftpReader = new FTPReader(source.getVfsSourceCredential(), fileInfo); + ftpReader.setPool(connectionBag.getFtpReaderPool()); + return ftpReader; + case s3: + AmazonS3Reader amazonS3Reader = new AmazonS3Reader(source.getVfsSourceCredential(), fileInfo); + amazonS3Reader.setPool(connectionBag.getS3ReaderPool()); + return amazonS3Reader; + case box: + BoxReader boxReader = new BoxReader(source.getOauthSourceCredential(), fileInfo); + boxReader.setMaxRetry(transferOptions.getRetry()); + return boxReader; + case dropbox: + DropBoxReader dropBoxReader = new DropBoxReader(source.getOauthSourceCredential(), fileInfo); + return dropBoxReader; + case scp: + SCPReader reader = new SCPReader(fileInfo); + reader.setPool(connectionBag.getSftpReaderPool()); + return reader; + case gdrive: + GDriveReader dDriveReader = new GDriveReader(source.getOauthSourceCredential(), fileInfo); + return dDriveReader; + } + return null; + } + + public ItemWriter getRightWriter(TransferJobRequest.Destination destination, EntityInfo fileInfo) { + switch (destination.getType()) { + case vfs: + VfsWriter vfsWriter = new VfsWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return vfsWriter; + case sftp: + SFTPWriter sftpWriter = new 
SFTPWriter(destination.getVfsDestCredential(), this.metricsCollector, this.influxCache); + sftpWriter.setPool(connectionBag.getSftpWriterPool()); + return sftpWriter; + case ftp: + FTPWriter ftpWriter = new FTPWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + ftpWriter.setPool(connectionBag.getFtpWriterPool()); + return ftpWriter; + case s3: + if (fileInfo.getSize() < TWENTY_MB) { + AmazonS3SmallFileWriter amazonS3SmallFileWriter = new AmazonS3SmallFileWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + amazonS3SmallFileWriter.setPool(connectionBag.getS3WriterPool()); + return amazonS3SmallFileWriter; + } else { + AmazonS3LargeFileWriter amazonS3LargeFileWriter = new AmazonS3LargeFileWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + amazonS3LargeFileWriter.setPool(connectionBag.getS3WriterPool()); + return amazonS3LargeFileWriter; + } + case box: + if (fileInfo.getSize() < TWENTY_MB) { + BoxWriterSmallFile boxWriterSmallFile = new BoxWriterSmallFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return boxWriterSmallFile; + } else { + BoxWriterLargeFile boxWriterLargeFile = new BoxWriterLargeFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return boxWriterLargeFile; + } + case dropbox: + return new DropBoxChunkedWriter(destination.getOauthDestCredential(), this.metricsCollector, this.influxCache); + case scp: + SCPWriter scpWriter = new SCPWriter(fileInfo, this.metricsCollector, this.influxCache); + scpWriter.setPool(connectionBag.getSftpWriterPool()); + return scpWriter; + case gdrive: + if (fileInfo.getSize() < FIVE_MB) { + GDriveSimpleWriter writer = new GDriveSimpleWriter(destination.getOauthDestCredential(), fileInfo); + return writer; + } else { + GDriveResumableWriter writer = new GDriveResumableWriter(destination.getOauthDestCredential(), fileInfo); + writer.setPool(connectionBag.getGoogleDriveWriterPool()); + return writer; + } + } + return null; + } + + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java index eae7f925..fe31032c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java @@ -10,8 +10,6 @@ import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.annotation.BeforeStep; import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; import org.springframework.util.ClassUtils; @@ -19,13 +17,13 @@ public class BoxReader extends AbstractItemCountingItemStreamItemReader { - private OAuthEndpointCredential credential; FilePartitioner filePartitioner; - private BoxAPIConnection boxAPIConnection; - private BoxFile currentFile; EntityInfo fileInfo; int retry; Logger logger = LoggerFactory.getLogger(BoxReader.class); + private final OAuthEndpointCredential credential; + private BoxAPIConnection boxAPIConnection; + private BoxFile currentFile; public BoxReader(OAuthEndpointCredential credential, EntityInfo fileInfo) { this.credential = 
credential; @@ -45,9 +43,9 @@ protected DataChunk doRead() { FilePart filePart = filePartitioner.nextPart(); if (filePart == null) return null; ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); - if(this.fileInfo.getSize() == this.fileInfo.getChunkSize()){ + if (this.fileInfo.getSize() == this.fileInfo.getChunkSize()) { this.currentFile.download(byteArray); - }else{ + } else { this.currentFile.downloadRange(byteArray, filePart.getStart(), filePart.getEnd()); } DataChunk chunk = ODSUtility.makeChunk(filePart.getSize(), byteArray.toByteArray(), filePart.getStart(), Math.toIntExact(filePart.getPartIdx()), currentFile.getInfo().getName()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java index a7dfc847..25781c92 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,14 +34,14 @@ */ public class BoxWriterLargeFile extends ODSBaseWriter implements ItemWriter { - private BoxAPIConnection boxAPIConnection; EntityInfo fileInfo; - private HashMap fileMap; - private HashMap digestMap; - private List parts; String destinationBasePath; BoxFolder boxFolder; Logger logger = LoggerFactory.getLogger(BoxWriterLargeFile.class); + private final BoxAPIConnection boxAPIConnection; + private final HashMap fileMap; + private final HashMap digestMap; + private final List parts; public BoxWriterLargeFile(OAuthEndpointCredential oAuthDestCredential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { super(metricsCollector, influxCache); @@ -93,10 +93,7 @@ private void prepareForUpload(String fileName) throws NoSuchAlgorithmException { * @return */ private boolean ready(String fileName) { - if (!this.fileMap.containsKey(fileName) || !this.digestMap.containsKey(fileName)) { - return false; - } - return true; + return this.fileMap.containsKey(fileName) && this.digestMap.containsKey(fileName); } /** @@ -116,7 +113,7 @@ public void write(Chunk chunk) throws Exception { BoxFileUploadSessionPart part = session.uploadPart(dataChunk.getData(), dataChunk.getStartPosition(), Long.valueOf(dataChunk.getSize()).intValue(), this.fileInfo.getSize()); this.parts.add(part); digest.update(dataChunk.getData()); - logger.info("Current chunk in BoxLargeFile Writer " + dataChunk.toString()); + logger.info("Current chunk in BoxLargeFile Writer " + dataChunk); } this.digestMap.put(fileName, digest); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java index 6f6f6134..9a6d7707 100644 
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java @@ -1,14 +1,13 @@ package org.onedatashare.transferservice.odstransferservice.service.step.box; import com.box.sdk.BoxAPIConnection; -import com.box.sdk.BoxFileUploadSession; import com.box.sdk.BoxFolder; import org.onedatashare.transferservice.odstransferservice.model.BoxSmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -19,20 +18,19 @@ import org.springframework.batch.item.Chunk; import org.springframework.batch.item.ItemWriter; -import java.util.HashMap; import java.util.List; import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.DEST_BASE_PATH; public class BoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { - private BoxAPIConnection boxAPIConnection; EntityInfo fileInfo; String destinationBasePath; BoxFolder boxFolder; BoxSmallFileUpload smallFileUpload; - private String fileName; Logger logger = LoggerFactory.getLogger(BoxWriterSmallFile.class); + private final BoxAPIConnection boxAPIConnection; + private String fileName; public BoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { super(metricsCollector, influxCache); @@ -55,7 +53,7 @@ public void beforeStep(StepExecution stepExecution) { */ @AfterStep public ExitStatus afterStep(StepExecution stepExecution) { - boxFolder.uploadFile(this.smallFileUpload.condenseListToOneStream(this.fileInfo.getSize()), fileName); + boxFolder.uploadFile(this.smallFileUpload.condenseListToOneStream(), fileName); return stepExecution.getExitStatus(); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java index 92cc3654..f182a65b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java @@ -9,7 +9,7 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import 
org.springframework.batch.core.ExitStatus; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java index 184fe2c7..8c5ceafa 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.FtpConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java index 4fbcaae2..d8b1828b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java @@ -10,12 +10,15 @@ import org.onedatashare.transferservice.odstransferservice.pools.HttpConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.annotation.AfterStep; import org.springframework.batch.core.annotation.BeforeStep; -import org.springframework.batch.item.*; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemStreamException; import java.io.IOException; import java.net.URI; @@ -38,6 +41,7 @@ public class HttpReader implements SetPool, ItemReader { AccountEndpointCredential sourceCred; Boolean compressable; private String uri; + Logger logger; public HttpReader(EntityInfo fileInfo, AccountEndpointCredential credential) { @@ -46,6 +50,7 @@ public HttpReader(EntityInfo fileInfo, AccountEndpointCredential credential) { this.filePartitioner = new FilePartitioner(fileInfo.getChunkSize()); this.sourceCred = credential; this.range = true; + this.logger = LoggerFactory.getLogger(HttpReader.class); } @BeforeStep @@ -92,7 +97,7 @@ public HttpRequest rangeMode(String uri, FilePart filePart, boolean valid) { } @Override - public DataChunk read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException { + public DataChunk read() throws IOException, InterruptedException { FilePart filePart = this.filePartitioner.nextPart(); if (filePart == null) return null; HttpRequest request; @@ -102,9 +107,8 @@ public DataChunk read() throws Exception, UnexpectedInputException, ParseExcepti request = rangeMode(uri, filePart, this.range); } HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); - 
System.out.println(Thread.currentThread().toString() + "File Chunk: " + filePart.toString()); - DataChunk chunk = ODSUtility.makeChunk(response.body().length, response.body(), filePart.getStart(), Long.valueOf(filePart.getPartIdx()).intValue(), this.fileName); - return chunk; + logger.info(Thread.currentThread().toString(), "File Chunk: ", filePart.toString()); + return ODSUtility.makeChunk(response.body().length, response.body(), filePart.getStart(), Long.valueOf(filePart.getPartIdx()).intValue(), this.fileName); } public void open() throws ItemStreamException { @@ -115,20 +119,6 @@ public void open() throws ItemStreamException { } String filePath = Paths.get(fileInfo.getPath()).toString(); uri = sourceCred.getUri() + filePath; -// HttpRequest request = HttpRequest.newBuilder() -// .GET() -// .uri(URI.create(uri)) //make http a string constant as well -// .setHeader(ODSConstants.ACCEPT_ENCODING, ODSConstants.GZIP) -// .setHeader(ODSConstants.RANGE, String.format(ODSConstants.byteRange, 0, 1)) //make Range into a string constant as well as bytes -// .build(); -// HttpResponse response = null; -// try { -// response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); -// } catch (IOException | InterruptedException e) { -// throw new RuntimeException(e); -// } -// range = response.statusCode() == 206; -// compressable = response.headers().allValues(ODSConstants.CONTENT_ENCODING).size() != 0; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java index 100a9a80..cc672f32 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java @@ -10,7 +10,7 @@ import org.onedatashare.transferservice.odstransferservice.model.SetPool; import org.onedatashare.transferservice.odstransferservice.pools.JschSessionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java index 4525882c..5e842279 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.JschSessionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java index 5441d40b..78f2a8af 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java @@ -4,7 +4,7 @@ import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; From 6c6d43e2e93baa892216f05ced685af5b1a62613 Mon Sep 17 00:00:00 2001 From: vgottipa Date: Fri, 9 Feb 2024 13:10:10 -0500 Subject: [PATCH 024/150] Publish to ECR --- .github/workflows/docker-image.yml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index d07d37c5..edd02ab5 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -33,3 +33,21 @@ jobs: push: true tags: onedatashare/transfer_service:latest + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-1 + + - name: docker login to ECR + run: | + aws ecr get-login-password --region us-west-1 | docker login --username AWS --password-stdin ${{ secrets.AWS_REPO_URI }} + + - name: Build and push + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: true + tags: ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest \ No newline at end of file From 87aae39094755e4413c82f9a687f7e1ddce2509f Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 12 Feb 2024 16:23:11 -0500 Subject: [PATCH 025/150] Add quotes to the tag --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index edd02ab5..07d4f7d4 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -50,4 +50,4 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest \ No newline at end of file + tags: "${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest" \ No newline at end of file From 8674adb062915402571d965daddd9997ae7456fe Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Wed, 14 Feb 2024 10:38:21 -0500 Subject: [PATCH 026/150] Tag change --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 07d4f7d4..8c4085ab 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -50,4 +50,4 @@ jobs: context: . 
platforms: linux/amd64,linux/arm64 push: true - tags: "${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest" \ No newline at end of file + tags: onedatashare/transfer_service:latest \ No newline at end of file From 9aab0fe3ca8cddb4aed6a7b87a7b6c0a11db2c03 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Wed, 14 Feb 2024 11:09:37 -0500 Subject: [PATCH 027/150] Tag referencing --- .github/workflows/docker-image.yml | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 8c4085ab..3daf3c34 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -25,7 +25,9 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Build and push + + - name: Build and push dockerhub + id: dockerhub uses: docker/build-push-action@v5 with: context: . @@ -44,10 +46,13 @@ jobs: run: | aws ecr get-login-password --region us-west-1 | docker login --username AWS --password-stdin ${{ secrets.AWS_REPO_URI }} - - name: Build and push - uses: docker/build-push-action@v5 - with: - context: . - platforms: linux/amd64,linux/arm64 - push: true - tags: onedatashare/transfer_service:latest \ No newline at end of file + - name: Build and push AWS + run: | + # Retrieve the image reference from the dockerhub step + DOCKERHUB_IMAGE_REF=$(echo "${{ steps.dockerhub.outputs.images }}") + + # Tag the image for ECR + docker tag $DOCKERHUB_IMAGE_REF ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest + + # Push the image to ECR + docker push ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest From 0d7eee07c1451b265d83a3fc82cf98c4d56f6bae Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 13:03:51 -0500 Subject: [PATCH 028/150] ECR AWS push --- .github/workflows/docker-image.yml | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 3daf3c34..a85fedd2 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -45,14 +45,12 @@ jobs: - name: docker login to ECR run: | aws ecr get-login-password --region us-west-1 | docker login --username AWS --password-stdin ${{ secrets.AWS_REPO_URI }} - - - name: Build and push AWS + + - name: Build, tag, and push docker image to Amazon ECR Public + env: + REGISTRY: ${{ secrets.AWS_REPO_URI }} + REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: lastest run: | - # Retrieve the image reference from the dockerhub step - DOCKERHUB_IMAGE_REF=$(echo "${{ steps.dockerhub.outputs.images }}") - - # Tag the image for ECR - docker tag $DOCKERHUB_IMAGE_REF ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest - - # Push the image to ECR - docker push ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:latest + docker build -t $REGISTRY/$REPOSITORY:$IMAGE_TAG . 
+ docker push $REGISTRY/$REGISTRY_ALIAS/$REPOSITORY:$IMAGE_TAG From a5c930f100b281f3b0283f5c1cafa313f1565e14 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 13:08:17 -0500 Subject: [PATCH 029/150] ECR AWS push --- .github/workflows/docker-image.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a85fedd2..3c23f0d0 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -27,7 +27,6 @@ jobs: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - name: Build and push dockerhub - id: dockerhub uses: docker/build-push-action@v5 with: context: . @@ -46,7 +45,7 @@ jobs: run: | aws ecr get-login-password --region us-west-1 | docker login --username AWS --password-stdin ${{ secrets.AWS_REPO_URI }} - - name: Build, tag, and push docker image to Amazon ECR Public + - name: Build, tag, and push docker image to Amazon ECR Private env: REGISTRY: ${{ secrets.AWS_REPO_URI }} REPOSITORY: onedatashare/transfer_service From da2aad386ce412c8798ee91fc8fa9cce081458db Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 14:03:15 -0500 Subject: [PATCH 030/150] ECR AWS push --- .github/workflows/docker-image.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 3c23f0d0..4637f1e5 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -48,8 +48,7 @@ jobs: - name: Build, tag, and push docker image to Amazon ECR Private env: REGISTRY: ${{ secrets.AWS_REPO_URI }} - REPOSITORY: onedatashare/transfer_service IMAGE_TAG: lastest run: | - docker build -t $REGISTRY/$REPOSITORY:$IMAGE_TAG . - docker push $REGISTRY/$REGISTRY_ALIAS/$REPOSITORY:$IMAGE_TAG + docker build -t $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG . + docker push $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG From 70c1611fd484ace6362d0179cea1a6d0aa31b7e6 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 14:18:51 -0500 Subject: [PATCH 031/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 4637f1e5..a5482058 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -50,5 +50,5 @@ jobs: REGISTRY: ${{ secrets.AWS_REPO_URI }} IMAGE_TAG: lastest run: | - docker build -t $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG . - docker push $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG + docker build -t ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:$IMAGE_TAG . 
+ docker push ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:$IMAGE_TAG From 368143b355b02ec8ae28d4eacbd873ae9a342bea Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 14:34:15 -0500 Subject: [PATCH 032/150] ECR AWS push --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a5482058..e0e35592 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -48,7 +48,7 @@ jobs: - name: Build, tag, and push docker image to Amazon ECR Private env: REGISTRY: ${{ secrets.AWS_REPO_URI }} - IMAGE_TAG: lastest + IMAGE_TAG: latest run: | - docker build -t ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:$IMAGE_TAG . - docker push ${{ secrets.AWS_REPO_URI }}/onedatashare/transfer_service:$IMAGE_TAG + docker build -t 148306013631.dkr.ecr.us-west-1.amazonaws.com/onedatashare/transfer_service:$IMAGE_TAG . + docker push 148306013631.dkr.ecr.us-west-1.amazonaws.com/onedatashare/transfer_service:$IMAGE_TAG From ad5216bf4fd5e805ac64a6fbf264be46781406af Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 16 Feb 2024 14:45:49 -0500 Subject: [PATCH 033/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index e0e35592..491ff1fd 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -50,5 +50,5 @@ jobs: REGISTRY: ${{ secrets.AWS_REPO_URI }} IMAGE_TAG: latest run: | - docker build -t 148306013631.dkr.ecr.us-west-1.amazonaws.com/onedatashare/transfer_service:$IMAGE_TAG . - docker push 148306013631.dkr.ecr.us-west-1.amazonaws.com/onedatashare/transfer_service:$IMAGE_TAG + docker build -t $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG . + docker push $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG From dbefd5b1b01957dae5185331e8329dd9c3d110bd Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 11:24:57 -0500 Subject: [PATCH 034/150] Publish to ECR --- .github/workflows/docker-image.yml | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 491ff1fd..a9bc37a9 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -40,15 +40,20 @@ jobs: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: us-west-1 - - - name: docker login to ECR - run: | - aws ecr get-login-password --region us-west-1 | docker login --username AWS --password-stdin ${{ secrets.AWS_REPO_URI }} - - name: Build, tag, and push docker image to Amazon ECR Private + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Build, tag, and push the image to Amazon ECR + id: build-image env: - REGISTRY: ${{ secrets.AWS_REPO_URI }} + ECR_REGISTRY: ${{ secrets.AWS_REPO_URI }} + ECR_REPOSITORY: onedatashare/transfer_service IMAGE_TAG: latest run: | - docker build -t $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG . - docker push $REGISTRY/onedatashare/transfer_service:$IMAGE_TAG + # Build a docker container and push it to ECR + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . + echo "Pushing image to ECR..." 
+ docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + echo "::set-output name=image::$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" From ba09aa0ded346219e9416b9fa1c3c5244296caf5 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 11:33:35 -0500 Subject: [PATCH 035/150] Publish to ECR --- .github/workflows/docker-image.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a9bc37a9..69e74b1d 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -50,10 +50,9 @@ jobs: env: ECR_REGISTRY: ${{ secrets.AWS_REPO_URI }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: latest run: | # Build a docker container and push it to ECR - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY . echo "Pushing image to ECR..." - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - echo "::set-output name=image::$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG" + docker push $ECR_REGISTRY/$ECR_REPOSITORY + echo "::set-output name=image::$ECR_REGISTRY/$ECR_REPOSITORY" From dd12dafd76ceb54c525a6fbea1f82db6953da8c1 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 11:35:17 -0500 Subject: [PATCH 036/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 69e74b1d..a9d6268f 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -52,7 +52,7 @@ jobs: ECR_REPOSITORY: onedatashare/transfer_service run: | # Build a docker container and push it to ECR - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY . + docker build -t ${ECR_REGISTRY}/${ECR_REPOSITORY} . echo "Pushing image to ECR..." - docker push $ECR_REGISTRY/$ECR_REPOSITORY - echo "::set-output name=image::$ECR_REGISTRY/$ECR_REPOSITORY" + docker push ${ECR_REGISTRY}/${ECR_REPOSITORY} + echo "::set-output name=image::${ECR_REGISTRY}/${ECR_REPOSITORY}" From d3fb65ba2ecb78c2d5869abe6b1c250d4ac58dbe Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 11:38:37 -0500 Subject: [PATCH 037/150] Publish to ECR --- .github/workflows/docker-image.yml | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a9d6268f..57da1368 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -34,25 +34,22 @@ jobs: push: true tags: onedatashare/transfer_service:latest - - name: Configure AWS credentials + - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v1 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} aws-region: us-west-1 - + - name: Login to Amazon ECR - id: login-ecr + id: login-pf-aws-ecr uses: aws-actions/amazon-ecr-login@v1 - - - name: Build, tag, and push the image to Amazon ECR - id: build-image + + - name: Build and push the tagged docker image to Amazon ECR env: - ECR_REGISTRY: ${{ secrets.AWS_REPO_URI }} + ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: latest run: | - # Build a docker container and push it to ECR - docker build -t ${ECR_REGISTRY}/${ECR_REPOSITORY} . - echo "Pushing image to ECR..." 
- docker push ${ECR_REGISTRY}/${ECR_REPOSITORY} - echo "::set-output name=image::${ECR_REGISTRY}/${ECR_REPOSITORY}" + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From d40f478815566c86558bebc66943835ed3f09be2 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 12:10:23 -0500 Subject: [PATCH 038/150] Publish to ECR --- .github/workflows/AWS-ECR-push.yml | 41 ++++++++++++++++++++++++ .github/workflows/Build-docker-image.yml | 35 ++++++++++++++++++++ .github/workflows/Push-dockerhub.yml | 31 ++++++++++++++++++ 3 files changed, 107 insertions(+) create mode 100644 .github/workflows/AWS-ECR-push.yml create mode 100644 .github/workflows/Build-docker-image.yml create mode 100644 .github/workflows/Push-dockerhub.yml diff --git a/.github/workflows/AWS-ECR-push.yml b/.github/workflows/AWS-ECR-push.yml new file mode 100644 index 00000000..3dde15f5 --- /dev/null +++ b/.github/workflows/AWS-ECR-push.yml @@ -0,0 +1,41 @@ +name: Push to ECR + +on: + workflow_run: + workflows: ["Push to DockerHub"] + types: + - completed + +jobs: + announce: + # This job does nothing except to bypass the atleast one independent job check in workflow + runs-on: ubuntu-latest + steps: + - name: announce + run: + echo "Pushing Image to AWS ECR..." + + push_to_ecr: + runs-on: ubuntu-latest + needs: push_to_dockerhub + steps: + - name: Setup AWS ECR Details + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-1 + + - name: Login to Amazon ECR + id: login-pf-aws-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Push to Amazon ECR + env: + ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} + ECR_REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: latest + run: | + docker pull onedatashare/transfer_service:latest + docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG diff --git a/.github/workflows/Build-docker-image.yml b/.github/workflows/Build-docker-image.yml new file mode 100644 index 00000000..9bad565b --- /dev/null +++ b/.github/workflows/Build-docker-image.yml @@ -0,0 +1,35 @@ +name: Build Docker Image + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +jobs: + + build: + + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: docker login + env: + DOCKER_USER: ${{secrets.DOCKER_USER}} + DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} + run: + docker login -u $DOCKER_USER -p $DOCKER_PASSWORD + + - name: Build Docker Image + uses: docker/build-push-action@v5 + with: + context: . 
+ platforms: linux/amd64,linux/arm64 + push: false # Do not push to DockerHub + tags: onedatashare/transfer_service:latest diff --git a/.github/workflows/Push-dockerhub.yml b/.github/workflows/Push-dockerhub.yml new file mode 100644 index 00000000..db56c5f4 --- /dev/null +++ b/.github/workflows/Push-dockerhub.yml @@ -0,0 +1,31 @@ +name: Push to DockerHub + +on: + workflow_run: + workflows: ["Build Docker Image"] + types: + - completed + +jobs: + announce: + # This job does nothing except to bypass the atleast one independent job check in workflow + runs-on: ubuntu-latest + steps: + - name: announce + run: + echo "Pushing Image to dockerhub..." + + push_to_dockerhub: + needs: build + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Push to DockerHub + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: true + tags: onedatashare/transfer_service:latest From 147fa8603b8fb7f047ba2b4fae631ae2158de24b Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 12:31:20 -0500 Subject: [PATCH 039/150] Publish to ECR --- .github/workflows/AWS-ECR-push.yml | 1 - .github/workflows/Push-dockerhub.yml | 1 - .github/workflows/docker-image.yml | 55 ---------------------------- 3 files changed, 57 deletions(-) delete mode 100644 .github/workflows/docker-image.yml diff --git a/.github/workflows/AWS-ECR-push.yml b/.github/workflows/AWS-ECR-push.yml index 3dde15f5..395f4a09 100644 --- a/.github/workflows/AWS-ECR-push.yml +++ b/.github/workflows/AWS-ECR-push.yml @@ -17,7 +17,6 @@ jobs: push_to_ecr: runs-on: ubuntu-latest - needs: push_to_dockerhub steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v1 diff --git a/.github/workflows/Push-dockerhub.yml b/.github/workflows/Push-dockerhub.yml index db56c5f4..33644737 100644 --- a/.github/workflows/Push-dockerhub.yml +++ b/.github/workflows/Push-dockerhub.yml @@ -16,7 +16,6 @@ jobs: echo "Pushing Image to dockerhub..." push_to_dockerhub: - needs: build runs-on: ubuntu-latest steps: - name: Checkout diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml deleted file mode 100644 index 57da1368..00000000 --- a/.github/workflows/docker-image.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Docker Image CI - -on: - push: - branches: [ "master" ] - pull_request: - branches: [ "master" ] - -jobs: - - build: - - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: docker login - env: - DOCKER_USER: ${{secrets.DOCKER_USER}} - DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} - run: - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - - name: Build and push dockerhub - uses: docker/build-push-action@v5 - with: - context: . 
- platforms: linux/amd64,linux/arm64 - push: true - tags: onedatashare/transfer_service:latest - - - name: Setup AWS ECR Details - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-1 - - - name: Login to Amazon ECR - id: login-pf-aws-ecr - uses: aws-actions/amazon-ecr-login@v1 - - - name: Build and push the tagged docker image to Amazon ECR - env: - ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} - ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: latest - run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 9bec79704bfbc7f6fed414eb4a8048d84fbf7e17 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 12:39:16 -0500 Subject: [PATCH 040/150] Publish to ECR --- .github/workflows/docker-image.yml | 73 ++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 .github/workflows/docker-image.yml diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml new file mode 100644 index 00000000..3535117d --- /dev/null +++ b/.github/workflows/docker-image.yml @@ -0,0 +1,73 @@ +name: Build and Push transfer_service Image + +on: + push: + branches: [ "master" ] + pull_request: + branches: [ "master" ] + +jobs: + + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + - name: docker login + env: + DOCKER_USER: ${{secrets.DOCKER_USER}} + DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} + run: + docker login -u $DOCKER_USER -p $DOCKER_PASSWORD + + - name: Build Docker Image + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64 + push: false # Do not push to DockerHub + tags: onedatashare/transfer_service:latest + + push_to_dockerhub: + needs: build + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Push to DockerHub + uses: docker/build-push-action@v5 + with: + context: . 
+ platforms: linux/amd64,linux/arm64 + push: true + tags: onedatashare/transfer_service:latest + + push_to_ecr: + needs: push_to_dockerhub + runs-on: ubuntu-latest + steps: + - name: Setup AWS ECR Details + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-west-1 + + - name: Login to Amazon ECR + id: login-pf-aws-ecr + uses: aws-actions/amazon-ecr-login@v1 + + - name: Push to Amazon ECR + env: + ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} + ECR_REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: latest + run: | + docker pull onedatashare/transfer_service:latest + docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From cce0c67112159da2e5eb58398dc64c8b88faa9ec Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 12:51:20 -0500 Subject: [PATCH 041/150] Publish to ECR --- .github/workflows/AWS-ECR-push.yml | 40 ------------------------ .github/workflows/Build-docker-image.yml | 35 --------------------- .github/workflows/Push-dockerhub.yml | 30 ------------------ .github/workflows/docker-image.yml | 33 ++++++++----------- 4 files changed, 13 insertions(+), 125 deletions(-) delete mode 100644 .github/workflows/AWS-ECR-push.yml delete mode 100644 .github/workflows/Build-docker-image.yml delete mode 100644 .github/workflows/Push-dockerhub.yml diff --git a/.github/workflows/AWS-ECR-push.yml b/.github/workflows/AWS-ECR-push.yml deleted file mode 100644 index 395f4a09..00000000 --- a/.github/workflows/AWS-ECR-push.yml +++ /dev/null @@ -1,40 +0,0 @@ -name: Push to ECR - -on: - workflow_run: - workflows: ["Push to DockerHub"] - types: - - completed - -jobs: - announce: - # This job does nothing except to bypass the atleast one independent job check in workflow - runs-on: ubuntu-latest - steps: - - name: announce - run: - echo "Pushing Image to AWS ECR..." 
- - push_to_ecr: - runs-on: ubuntu-latest - steps: - - name: Setup AWS ECR Details - uses: aws-actions/configure-aws-credentials@v1 - with: - aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-1 - - - name: Login to Amazon ECR - id: login-pf-aws-ecr - uses: aws-actions/amazon-ecr-login@v1 - - - name: Push to Amazon ECR - env: - ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} - ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: latest - run: | - docker pull onedatashare/transfer_service:latest - docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG diff --git a/.github/workflows/Build-docker-image.yml b/.github/workflows/Build-docker-image.yml deleted file mode 100644 index 9bad565b..00000000 --- a/.github/workflows/Build-docker-image.yml +++ /dev/null @@ -1,35 +0,0 @@ -name: Build Docker Image - -on: - push: - branches: [ "master" ] - pull_request: - branches: [ "master" ] - -jobs: - - build: - - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: docker login - env: - DOCKER_USER: ${{secrets.DOCKER_USER}} - DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} - run: - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - - name: Build Docker Image - uses: docker/build-push-action@v5 - with: - context: . - platforms: linux/amd64,linux/arm64 - push: false # Do not push to DockerHub - tags: onedatashare/transfer_service:latest diff --git a/.github/workflows/Push-dockerhub.yml b/.github/workflows/Push-dockerhub.yml deleted file mode 100644 index 33644737..00000000 --- a/.github/workflows/Push-dockerhub.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Push to DockerHub - -on: - workflow_run: - workflows: ["Build Docker Image"] - types: - - completed - -jobs: - announce: - # This job does nothing except to bypass the atleast one independent job check in workflow - runs-on: ubuntu-latest - steps: - - name: announce - run: - echo "Pushing Image to dockerhub..." - - push_to_dockerhub: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Push to DockerHub - uses: docker/build-push-action@v5 - with: - context: . - platforms: linux/amd64,linux/arm64 - push: true - tags: onedatashare/transfer_service:latest diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 3535117d..49f255a3 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,4 +1,4 @@ -name: Build and Push transfer_service Image +name: Docker Image CI on: push: @@ -7,7 +7,6 @@ on: branches: [ "master" ] jobs: - build: runs-on: ubuntu-latest steps: @@ -29,27 +28,21 @@ jobs: with: context: . platforms: linux/amd64,linux/arm64 - push: false # Do not push to DockerHub + push: false # Do not push in this step tags: onedatashare/transfer_service:latest - - push_to_dockerhub: - needs: build + + push-to-dockerhub: runs-on: ubuntu-latest + needs: build steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Push to DockerHub - uses: docker/build-push-action@v5 - with: - context: . 
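The three workflows deleted in this patch were chained with `workflow_run`, which fires whenever the upstream workflow completes, successful or not, unless the downstream job checks `github.event.workflow_run.conclusion`; it is also why the placeholder `announce` jobs above existed at all. Folding everything into one workflow replaces that machinery with `needs:`, which both orders the jobs and gates each one on its predecessor succeeding. A minimal sketch of that shape, with illustrative job ids:

    name: pipeline (illustrative)
    on:
      push:
        branches: [ "master" ]
    jobs:
      build:
        runs-on: ubuntu-latest
        steps:
          - run: echo "build the image"
      push_to_dockerhub:
        needs: build              # runs only if build succeeded
        runs-on: ubuntu-latest
        steps:
          - run: echo "push to DockerHub"
      push_to_ecr:
        needs: push_to_dockerhub  # chain continues, same success gating
        runs-on: ubuntu-latest
        steps:
          - run: echo "push to ECR"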
- platforms: linux/amd64,linux/arm64 - push: true - tags: onedatashare/transfer_service:latest - - push_to_ecr: - needs: push_to_dockerhub + if: github.event_name == 'push' # Only push on actual push events + run: | + docker push onedatashare/transfer_service:latest + + push-to-ecr: runs-on: ubuntu-latest + needs: push-to-dockerhub steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v1 @@ -61,8 +54,8 @@ jobs: - name: Login to Amazon ECR id: login-pf-aws-ecr uses: aws-actions/amazon-ecr-login@v1 - - - name: Push to Amazon ECR + + - name: Build and push the tagged docker image to Amazon ECR env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service From 124084f47a6f767764b8b2f7913c96188eeb6cc9 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 12:59:34 -0500 Subject: [PATCH 042/150] Publish to ECR --- .github/workflows/docker-image.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 49f255a3..3d4ef7e8 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -30,13 +30,12 @@ jobs: platforms: linux/amd64,linux/arm64 push: false # Do not push in this step tags: onedatashare/transfer_service:latest - + push-to-dockerhub: runs-on: ubuntu-latest needs: build steps: - name: Push to DockerHub - if: github.event_name == 'push' # Only push on actual push events run: | docker push onedatashare/transfer_service:latest From d870595ba99a49a0b7ea942bc57bfa0b469bd47e Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 13:08:02 -0500 Subject: [PATCH 043/150] Publish to ECR --- .github/workflows/docker-image.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 3d4ef7e8..8737f8fc 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -31,10 +31,18 @@ jobs: push: false # Do not push in this step tags: onedatashare/transfer_service:latest + - name: Save Docker Image + run: | + docker save -o transfer_service.tar onedatashare/transfer_service:latest + echo "::set-output name=image::transfer_service.tar" + id: save-image + push-to-dockerhub: runs-on: ubuntu-latest needs: build steps: + - name: Load Docker Image + run: docker load -i ${{ needs.build.outputs.save-image }} - name: Push to DockerHub run: | docker push onedatashare/transfer_service:latest From c32ec9bd7ff8cc4f4770dd2d8a51fddd61a8a838 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 13:46:06 -0500 Subject: [PATCH 044/150] Publish to ECR --- .github/workflows/docker-image.yml | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 8737f8fc..dbd81ceb 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -31,20 +31,25 @@ jobs: push: false # Do not push in this step tags: onedatashare/transfer_service:latest - - name: Save Docker Image - run: | - docker save -o transfer_service.tar onedatashare/transfer_service:latest - echo "::set-output name=image::transfer_service.tar" - id: save-image - + - name: Cache Docker Image + uses: actions/upload-artifact@v2 + with: + name: docker-image + path: /var/lib/docker/image + push-to-dockerhub: runs-on: ubuntu-latest needs: build steps: - - 
name: Load Docker Image - run: docker load -i ${{ needs.build.outputs.save-image }} + - name: Restore Cached Docker Image + uses: actions/download-artifact@v2 + with: + name: docker-image + path: /var/lib/docker/image + - name: Push to DockerHub run: | + docker load -i /var/lib/docker/image/docker-image docker push onedatashare/transfer_service:latest push-to-ecr: From a145fab9bda1f177897efaf42a2ceb2eae708572 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 13:58:09 -0500 Subject: [PATCH 045/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index dbd81ceb..169fdc66 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -21,7 +21,7 @@ jobs: DOCKER_USER: ${{secrets.DOCKER_USER}} DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: - docker login -u $DOCKER_USER -p $DOCKER_PASSWORD + sudo docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - name: Build Docker Image uses: docker/build-push-action@v5 @@ -49,8 +49,8 @@ jobs: - name: Push to DockerHub run: | - docker load -i /var/lib/docker/image/docker-image - docker push onedatashare/transfer_service:latest + sudo docker load -i /var/lib/docker/image/docker-image + sudo docker push onedatashare/transfer_service:latest push-to-ecr: runs-on: ubuntu-latest From 6d430722275f195fed988cada91adccb4f0e7e73 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 17:41:27 -0500 Subject: [PATCH 046/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 169fdc66..a9a5f402 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -41,6 +41,10 @@ jobs: runs-on: ubuntu-latest needs: build steps: + - name: permissions + run: | + sudo chown $USER /var/lib/docker/image + - name: Restore Cached Docker Image uses: actions/download-artifact@v2 with: From ba7b14049676a6077150e474ee1cd88b9a317cde Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 17:50:24 -0500 Subject: [PATCH 047/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a9a5f402..962ad5ed 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -35,7 +35,7 @@ jobs: uses: actions/upload-artifact@v2 with: name: docker-image - path: /var/lib/docker/image + path: /tmp/image push-to-dockerhub: runs-on: ubuntu-latest @@ -43,13 +43,13 @@ jobs: steps: - name: permissions run: | - sudo chown $USER /var/lib/docker/image + sudo chown $USER /tmp/image - name: Restore Cached Docker Image uses: actions/download-artifact@v2 with: name: docker-image - path: /var/lib/docker/image + path: /tmp/image - name: Push to DockerHub run: | From e7ac7b0fc2e61cd6d7ce423b98be901af9118517 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 19:03:38 -0500 Subject: [PATCH 048/150] Publish to ECR --- .github/workflows/docker-image.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 962ad5ed..74c2d921 100644 --- a/.github/workflows/docker-image.yml +++ 
b/.github/workflows/docker-image.yml @@ -31,22 +31,22 @@ jobs: push: false # Do not push in this step tags: onedatashare/transfer_service:latest + - name: Create a different file + run: touch /tmp/image/docker-image + - name: Cache Docker Image - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: docker-image path: /tmp/image + overwrite: true push-to-dockerhub: runs-on: ubuntu-latest needs: build steps: - - name: permissions - run: | - sudo chown $USER /tmp/image - - name: Restore Cached Docker Image - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v4 with: name: docker-image path: /tmp/image From 96782a2e62bc7e1aaad2436e45f6ec90d5493191 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 19:20:34 -0500 Subject: [PATCH 049/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 74c2d921..42998140 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -31,8 +31,8 @@ jobs: push: false # Do not push in this step tags: onedatashare/transfer_service:latest - - name: Create a different file - run: touch /tmp/image/docker-image + - name: Create a file + run: cat "" > /tmp/image/docker-image - name: Cache Docker Image uses: actions/upload-artifact@v4 From 809ceb81a2bd1e54dd5da73a00a7e21b3d983808 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 19:30:49 -0500 Subject: [PATCH 050/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 42998140..3596d947 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -32,8 +32,10 @@ jobs: tags: onedatashare/transfer_service:latest - name: Create a file - run: cat "" > /tmp/image/docker-image - + run: | + mkdir -p /tmp/image + cat "" > /tmp/image/docker-image + - name: Cache Docker Image uses: actions/upload-artifact@v4 with: From b9434cef65abce0482163c97b5388969a8a8b89d Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 19:40:23 -0500 Subject: [PATCH 051/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 3596d947..11350600 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -34,8 +34,8 @@ jobs: - name: Create a file run: | mkdir -p /tmp/image - cat "" > /tmp/image/docker-image - + touch /tmp/image/docker-image + - name: Cache Docker Image uses: actions/upload-artifact@v4 with: @@ -55,7 +55,7 @@ jobs: - name: Push to DockerHub run: | - sudo docker load -i /var/lib/docker/image/docker-image + sudo docker load -i /tmp/image/docker-image sudo docker push onedatashare/transfer_service:latest push-to-ecr: From d4e66c8afb0c2dcff07afde5c678966b4d54b987 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 20:50:42 -0500 Subject: [PATCH 052/150] Publish to ECR --- .github/workflows/docker-image.yml | 31 ++---------------------------- 1 file changed, 2 insertions(+), 29 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 11350600..94461db4 100644 --- 
a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -28,39 +28,12 @@ jobs: with: context: . platforms: linux/amd64,linux/arm64 - push: false # Do not push in this step + push: true # Do not push in this step tags: onedatashare/transfer_service:latest - - name: Create a file - run: | - mkdir -p /tmp/image - touch /tmp/image/docker-image - - - name: Cache Docker Image - uses: actions/upload-artifact@v4 - with: - name: docker-image - path: /tmp/image - overwrite: true - - push-to-dockerhub: - runs-on: ubuntu-latest - needs: build - steps: - - name: Restore Cached Docker Image - uses: actions/download-artifact@v4 - with: - name: docker-image - path: /tmp/image - - - name: Push to DockerHub - run: | - sudo docker load -i /tmp/image/docker-image - sudo docker push onedatashare/transfer_service:latest - push-to-ecr: runs-on: ubuntu-latest - needs: push-to-dockerhub + needs: build steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v1 From 97fe44b92a38be4774fb0eaf0ed8830d98f1bbaa Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 20:56:32 -0500 Subject: [PATCH 053/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 94461db4..2a61ebf0 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -28,7 +28,7 @@ jobs: with: context: . platforms: linux/amd64,linux/arm64 - push: true # Do not push in this step + push: true tags: onedatashare/transfer_service:latest push-to-ecr: From f727e6d685c17570aa199e803e00c5148750d5b1 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 21:08:37 -0500 Subject: [PATCH 054/150] Publish to ECR --- .github/workflows/docker-image.yml | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 2a61ebf0..c9d725dc 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -21,9 +21,9 @@ jobs: DOCKER_USER: ${{secrets.DOCKER_USER}} DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: - sudo docker login -u $DOCKER_USER -p $DOCKER_PASSWORD + docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Build Docker Image + - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . @@ -31,10 +31,6 @@ jobs: push: true tags: onedatashare/transfer_service:latest - push-to-ecr: - runs-on: ubuntu-latest - needs: build - steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v1 with: @@ -52,6 +48,5 @@ jobs: ECR_REPOSITORY: onedatashare/transfer_service IMAGE_TAG: latest run: | - docker pull onedatashare/transfer_service:latest - docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . 
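The artifact experiments in the patches above run into a basic constraint: every job gets a fresh runner, so an image built with `push: false` in one job simply does not exist in the next, and `/var/lib/docker/image` (the daemon's internal storage) is not something `upload-artifact` can turn into a loadable image. The two workable hand-offs are pushing to a registry in the build job, which is the route this patch takes, or an explicit save/load round-trip through an artifact. A sketch of the latter, assuming a single-platform build (multi-platform buildx output generally cannot be loaded into the local daemon):

    # in the build job
    - name: Build and save image
      run: |
        docker build -t onedatashare/transfer_service:latest .
        docker save onedatashare/transfer_service:latest -o /tmp/image.tar
    - uses: actions/upload-artifact@v4
      with:
        name: docker-image
        path: /tmp/image.tar

    # in the downstream job
    - uses: actions/download-artifact@v4
      with:
        name: docker-image
        path: /tmp
    - name: Load and push image
      run: |
        docker load -i /tmp/image.tar
        docker push onedatashare/transfer_service:latest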
+ docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From 65055272496e8d16be0f0560a5c85a462cc2196b Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 21:21:37 -0500 Subject: [PATCH 055/150] Publish to ECR --- .github/workflows/docker-image.yml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index c9d725dc..280d1436 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -31,8 +31,11 @@ jobs: push: true tags: onedatashare/transfer_service:latest + push-to-ecr: + runs-on: ubuntu-latest + steps: - name: Setup AWS ECR Details - uses: aws-actions/configure-aws-credentials@v1 + uses: aws-actions/configure-aws-credentials@v3 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} @@ -40,7 +43,7 @@ jobs: - name: Login to Amazon ECR id: login-pf-aws-ecr - uses: aws-actions/amazon-ecr-login@v1 + uses: aws-actions/amazon-ecr-login@v2 - name: Build and push the tagged docker image to Amazon ECR env: @@ -48,5 +51,6 @@ jobs: ECR_REPOSITORY: onedatashare/transfer_service IMAGE_TAG: latest run: | - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG . + docker pull onedatashare/transfer_service:latest + docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG onedatashare/transfer_service:latest docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From d8872104e4ec319cff28d5fcc8b31d3a120759e5 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 21:22:17 -0500 Subject: [PATCH 056/150] Publish to ECR --- .github/workflows/docker-image.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 280d1436..6d52553e 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -32,6 +32,7 @@ jobs: tags: onedatashare/transfer_service:latest push-to-ecr: + needs: build runs-on: ubuntu-latest steps: - name: Setup AWS ECR Details From 9ff47aaf50d12ad5ab4ab6a41f64dd523391ad54 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 21:34:21 -0500 Subject: [PATCH 057/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 6d52553e..fa0a6d99 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -53,5 +53,5 @@ jobs: IMAGE_TAG: latest run: | docker pull onedatashare/transfer_service:latest - docker build -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG onedatashare/transfer_service:latest + docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG onedatashare/transfer_service:latest docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From 5a835d9ef4d9da61027728c8efe7dd31cd68673c Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 18 Feb 2024 21:43:41 -0500 Subject: [PATCH 058/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index fa0a6d99..aac9a5c6 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -53,5 +53,5 @@ jobs: IMAGE_TAG: latest run: | docker pull 
onedatashare/transfer_service:latest - docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG onedatashare/transfer_service:latest + docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From 243e337c94e9929f38ce17580285299dae6575ab Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 19 Feb 2024 16:23:55 -0500 Subject: [PATCH 059/150] fixed routing key issue --- src/main/resources/application.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 5267035a..451301b0 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -26,7 +26,7 @@ ods.rabbitmq.exchange=ods.exchange #for vfs nodes this should be the APP_NAME which is always lowercase. ods.rabbitmq.queue=${CONNECTOR_QUEUE:transferQueue} -ods.rabbitmq.routingkey=${CONNECTOR_QUEUE:ods.routing} +ods.rabbitmq.routingkey=ods.routing spring.rabbitmq.addresses=${AMPQ_ADDRESS} spring.rabbitmq.port=${AMPQ_PORT:5672} spring.rabbitmq.username=${AMPQ_USER:guest} From 5d2c284a2767eaf26fb0dc2248af387b5dab3e0f Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 16:59:59 -0500 Subject: [PATCH 060/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index aac9a5c6..75cffb66 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -29,7 +29,7 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:latest + tags: onedatashare/transfer_service:${{ github.event.release.tag_name }} push-to-ecr: needs: build @@ -50,7 +50,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: latest + IMAGE_TAG: ${{ github.event.release.tag_name }} run: | docker pull onedatashare/transfer_service:latest docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 0f16c5d72641ce947ee492c8f3a30431e879385c Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:04:30 -0500 Subject: [PATCH 061/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 75cffb66..314e9ae5 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -29,7 +29,7 @@ jobs: context: . 
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ github.event.release.tag_name }} + tags: onedatashare/transfer_service:${{ github.ref }} push-to-ecr: needs: build @@ -50,7 +50,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ github.event.release.tag_name }} + IMAGE_TAG: ${{ github.ref }} run: | docker pull onedatashare/transfer_service:latest docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 1ad85a2d99ea330f520f63a13d591b752f2d4555 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:16:58 -0500 Subject: [PATCH 062/150] ECR AWS push --- .github/workflows/docker-image.yml | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 314e9ae5..0a1ddbde 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -2,13 +2,18 @@ name: Docker Image CI on: push: - branches: [ "master" ] + branches: + - master + - 'refs/tags/*' pull_request: - branches: [ "master" ] + branches: + - master jobs: build: runs-on: ubuntu-latest + env: + TAG_NAME: "" steps: - name: Checkout uses: actions/checkout@v4 @@ -16,6 +21,10 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 + - name: Extract Tag Name + run: | + TAG_NAME=$(echo ${{ github.ref }} | sed 's|refs/tags/||') + echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV - name: docker login env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -29,7 +38,7 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ github.ref }} + tags: onedatashare/transfer_service:${TAG_NAME} push-to-ecr: needs: build @@ -50,8 +59,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ github.ref }} run: | - docker pull onedatashare/transfer_service:latest - docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file + docker pull onedatashare/transfer_service:${TAG_NAME} + docker tag onedatashare/transfer_service:${TAG_NAME} $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} \ No newline at end of file From 26632bd51a325f9c05b9740e2a6e4e6fd5a4c4e3 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:20:03 -0500 Subject: [PATCH 063/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 0a1ddbde..8551694a 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -43,6 +43,8 @@ jobs: push-to-ecr: needs: build runs-on: ubuntu-latest + env: + TAG_NAME: ${{ needs.build.outputs.TAG_NAME }} steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v3 @@ -62,4 +64,4 @@ jobs: run: | docker pull onedatashare/transfer_service:${TAG_NAME} docker tag onedatashare/transfer_service:${TAG_NAME} $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} - docker push $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} \ No newline at end of file + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} From 
a965c90fddbcd65c44c59122848d478175ed8118 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:24:30 -0500 Subject: [PATCH 064/150] ECR AWS push --- .github/workflows/docker-image.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 8551694a..594460bc 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -25,6 +25,8 @@ jobs: run: | TAG_NAME=$(echo ${{ github.ref }} | sed 's|refs/tags/||') echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV + - name: Debug Tag Name + run: echo "TAG_NAME=${TAG_NAME}" - name: docker login env: DOCKER_USER: ${{secrets.DOCKER_USER}} From c07a7596bf1445f72d95c667e2cf4e1097d6ccab Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:29:45 -0500 Subject: [PATCH 065/150] ECR AWS push --- .github/workflows/docker-image.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 594460bc..53f31a4d 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -25,8 +25,6 @@ jobs: run: | TAG_NAME=$(echo ${{ github.ref }} | sed 's|refs/tags/||') echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV - - name: Debug Tag Name - run: echo "TAG_NAME=${TAG_NAME}" - name: docker login env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -36,11 +34,13 @@ jobs: - name: Build and push dockerhub uses: docker/build-push-action@v5 + env: + TAG_NAME: ${{ needs.build.outputs.TAG_NAME }} with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${TAG_NAME} + tags: onedatashare/transfer_service:$TAG_NAME push-to-ecr: needs: build @@ -64,6 +64,6 @@ jobs: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service run: | - docker pull onedatashare/transfer_service:${TAG_NAME} - docker tag onedatashare/transfer_service:${TAG_NAME} $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} - docker push $ECR_REGISTRY/$ECR_REPOSITORY:${TAG_NAME} + docker pull onedatashare/transfer_service:$TAG_NAME + docker tag onedatashare/transfer_service:$TAG_NAME $ECR_REGISTRY/$ECR_REPOSITORY:$TAG_NAME + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$TAG_NAME From d91c08dded8b2146609fc1391301e9dbb418de45 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:36:36 -0500 Subject: [PATCH 066/150] ECR AWS push --- .github/workflows/docker-image.yml | 26 +++++++------------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 53f31a4d..cccd9172 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -2,18 +2,13 @@ name: Docker Image CI on: push: - branches: - - master - - 'refs/tags/*' + branches: [ "master" ] pull_request: - branches: - - master + branches: [ "master" ] jobs: build: runs-on: ubuntu-latest - env: - TAG_NAME: "" steps: - name: Checkout uses: actions/checkout@v4 @@ -21,10 +16,6 @@ jobs: uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Extract Tag Name - run: | - TAG_NAME=$(echo ${{ github.ref }} | sed 's|refs/tags/||') - echo "TAG_NAME=$TAG_NAME" >> $GITHUB_ENV - name: docker login env: DOCKER_USER: ${{secrets.DOCKER_USER}} @@ -34,19 +25,15 @@ jobs: - name: Build and push dockerhub uses: docker/build-push-action@v5 - env: - 
TAG_NAME: ${{ needs.build.outputs.TAG_NAME }} with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:$TAG_NAME + tags: onedatashare/transfer_service:v1.${{ github.run_number }} push-to-ecr: needs: build runs-on: ubuntu-latest - env: - TAG_NAME: ${{ needs.build.outputs.TAG_NAME }} steps: - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v3 @@ -63,7 +50,8 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: v1.${{ github.run_number }} run: | - docker pull onedatashare/transfer_service:$TAG_NAME - docker tag onedatashare/transfer_service:$TAG_NAME $ECR_REGISTRY/$ECR_REPOSITORY:$TAG_NAME - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$TAG_NAME + docker pull onedatashare/transfer_service:v1.${{ github.run_number }} + docker tag onedatashare/transfer_service:v1.${{ github.run_number }} $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From 7c500e80be793b8d46970eba101965d74700bd50 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Tue, 20 Feb 2024 17:52:15 -0500 Subject: [PATCH 067/150] ECR AWS push --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index cccd9172..26a42ef4 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -52,6 +52,6 @@ jobs: ECR_REPOSITORY: onedatashare/transfer_service IMAGE_TAG: v1.${{ github.run_number }} run: | - docker pull onedatashare/transfer_service:v1.${{ github.run_number }} - docker tag onedatashare/transfer_service:v1.${{ github.run_number }} $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker pull onedatashare/transfer_service:$IMAGE_TAG + docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From fa65e3bbc446a82276becee36e04ea6c1641347c Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 23 Feb 2024 10:18:41 -0500 Subject: [PATCH 068/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index aac9a5c6..fa8ea9de 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -29,7 +29,7 @@ jobs: context: . 
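Two pitfalls surface in the tag-passing attempts around here: `${{ needs.build.outputs.TAG_NAME }}` evaluates to an empty string because the build job declares no `outputs:` map, and values written to `$GITHUB_ENV` only survive for later steps of the same job. Note also that action inputs such as `tags:` get `${{ }}` expression expansion but no shell expansion, so a literal `$TAG_NAME` there is passed through unmodified. The supported way to move a value across jobs, with illustrative names:

    jobs:
      build:
        runs-on: ubuntu-latest
        outputs:
          tag: ${{ steps.tag.outputs.tag }}     # re-export the step output
        steps:
          - id: tag
            run: echo "tag=${GITHUB_REF#refs/tags/}" >> "$GITHUB_OUTPUT"
      push-to-ecr:
        needs: build
        runs-on: ubuntu-latest
        steps:
          - run: echo "pushing tag ${{ needs.build.outputs.tag }}"

Writing to `$GITHUB_OUTPUT` also supersedes the deprecated `::set-output` syntax that appears in several of these patches.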
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:latest + tags: onedatashare/transfer_service:$GITHUB_REF_NAME push-to-ecr: needs: build @@ -50,7 +50,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: latest + IMAGE_TAG: $GITHUB_REF_NAME run: | docker pull onedatashare/transfer_service:latest docker tag onedatashare/transfer_service:latest $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 264306ad6641b61298bf4349340dfd620b9ad2de Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 23 Feb 2024 10:20:32 -0500 Subject: [PATCH 069/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index a872337b..64fbc77d 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -29,8 +29,8 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:$GITHUB_REF_NAME - + tags: onedatashare/transfer_service:${{ github.ref_name }} + push-to-ecr: needs: build runs-on: ubuntu-latest @@ -50,7 +50,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: $GITHUB_REF_NAME + IMAGE_TAG: ${{ github.ref_name }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 58b918efbd74c4f893713493b9b65e86cd4fd1d4 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 23 Feb 2024 10:34:08 -0500 Subject: [PATCH 070/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 64fbc77d..607a31dc 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -29,8 +29,8 @@ jobs: context: . 
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ github.ref_name }} - + tags: onedatashare/transfer_service:${{ github.event.release.tag_name }} + push-to-ecr: needs: build runs-on: ubuntu-latest @@ -50,7 +50,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ github.ref_name }} + IMAGE_TAG: ${{ github.event.release.tag_name }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 78c6cfdfedfd17aa2e8fc6bee6c46e23ab88b661 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 23 Feb 2024 10:47:06 -0500 Subject: [PATCH 071/150] Publish to ECR --- .github/workflows/docker-image.yml | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 607a31dc..ed892cec 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,6 +1,8 @@ name: Docker Image CI on: + tags: + - '*' push: branches: [ "master" ] pull_request: @@ -22,14 +24,19 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - + - name: Get tag + id: tag + uses: dawidd6/action-get-tag@v1 + with: + # Optionally strip `v` prefix + strip_v: false - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ github.event.release.tag_name }} + tags: onedatashare/transfer_service:${{steps.tag.outputs.tag}} push-to-ecr: needs: build @@ -50,7 +57,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ github.event.release.tag_name }} + IMAGE_TAG: ${{steps.tag.outputs.tag}} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 854f8eb2586345a2dc8edfe5162ffac29ab6a86d Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Fri, 23 Feb 2024 10:48:28 -0500 Subject: [PATCH 072/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index ed892cec..61de0ce8 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,9 +1,9 @@ name: Docker Image CI on: - tags: - - '*' push: + tags: + - '*' branches: [ "master" ] pull_request: branches: [ "master" ] From c5978afd4a5f60b8ff243281f1c311831b9dd924 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 20:46:46 -0400 Subject: [PATCH 073/150] Publish to ECR --- .github/workflows/docker-image.yml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 61de0ce8..29b604c5 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -24,19 +24,16 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Get tag + - name: Get tag from GITHUB_REF id: tag - uses: dawidd6/action-get-tag@v1 - with: - # Optionally strip `v` prefix - strip_v: false + run: echo "TAG=${GITHUB_REF#refs/tags/}" >> 
$GITHUB_ENV - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{steps.tag.outputs.tag}} + tags: onedatashare/transfer_service:${{steps.tag.outputs.TAG}} push-to-ecr: needs: build @@ -57,7 +54,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{steps.tag.outputs.tag}} + IMAGE_TAG: ${{steps.tag.outputs.TAG}} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 5878af0ee051507875ec9ebf027acbdf593614ca Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 20:49:16 -0400 Subject: [PATCH 074/150] Publish to ECR --- .github/workflows/docker-image.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 29b604c5..6c4472de 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -25,7 +25,6 @@ jobs: run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - name: Get tag from GITHUB_REF - id: tag run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - name: Build and push dockerhub uses: docker/build-push-action@v5 @@ -33,7 +32,7 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{steps.tag.outputs.TAG}} + tags: onedatashare/transfer_service:${TAG} push-to-ecr: needs: build @@ -54,7 +53,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{steps.tag.outputs.TAG}} + IMAGE_TAG: ${{ env.TAG }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From f270df84c94e6ca8230c4000818c90ee5c86aec6 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 20:50:43 -0400 Subject: [PATCH 075/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 6c4472de..76b7a035 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -32,7 +32,7 @@ jobs: context: . 
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${TAG} + tags: onedatashare/transfer_service:${{env.TAG}} push-to-ecr: needs: build From 8bee73f5a4b747a26a6fdebc40e4f13e34c43f66 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:09:56 -0400 Subject: [PATCH 076/150] Publish to ECR --- .github/workflows/docker-image.yml | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 76b7a035..82bf680b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -3,7 +3,7 @@ name: Docker Image CI on: push: tags: - - '*' + - 'v*.*.*-beta' branches: [ "master" ] pull_request: branches: [ "master" ] @@ -24,15 +24,19 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Get tag from GITHUB_REF - run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + - name: Set env + run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV + - name: Test + run: | + echo $RELEASE_VERSION + echo ${{ env.RELEASE_VERSION }} - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{env.TAG}} + tags: onedatashare/transfer_service:${{ env.RELEASE_VERSION }} push-to-ecr: needs: build @@ -53,7 +57,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ env.TAG }} + IMAGE_TAG: ${{ env.RELEASE_VERSION }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 23940abdcabb3837cbb86c035e9254861764a4fb Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:14:46 -0400 Subject: [PATCH 077/150] Publish to ECR --- .github/workflows/docker-image.yml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 82bf680b..e024e00b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -3,7 +3,7 @@ name: Docker Image CI on: push: tags: - - 'v*.*.*-beta' + - '*' branches: [ "master" ] pull_request: branches: [ "master" ] @@ -24,19 +24,15 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Set env - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - name: Test - run: | - echo $RELEASE_VERSION - echo ${{ env.RELEASE_VERSION }} + - name: Get tag from git tag + run: echo "TAG=$(git tag)" >> $GITHUB_ENV - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . 
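For reference, the `${GITHUB_REF#...}` forms these patches cycle through are plain POSIX prefix stripping, and `${{ github.ref_name }}` yields the same short name without any shell involved:

    # on a tag push, GITHUB_REF is e.g. refs/tags/v1.2.3
    GITHUB_REF=refs/tags/v1.2.3
    echo "${GITHUB_REF#refs/tags/}"   # v1.2.3
    echo "${GITHUB_REF#refs/*/}"      # v1.2.3 (shortest match of refs/*/)

    # on a branch push the tags/ pattern does not match, so nothing is stripped
    GITHUB_REF=refs/heads/master
    echo "${GITHUB_REF#refs/tags/}"   # refs/heads/master
    echo "${GITHUB_REF#refs/*/}"      # master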
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ env.RELEASE_VERSION }} + tags: onedatashare/transfer_service:${{ env.TAG }} push-to-ecr: needs: build @@ -57,8 +53,8 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ env.RELEASE_VERSION }} + IMAGE_TAG: ${{ env.TAG }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 5685da09e3d2db56a60a9162b611d162458fea72 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:24:58 -0400 Subject: [PATCH 078/150] Publish to ECR --- .github/workflows/docker-image.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index e024e00b..26d3266b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -11,7 +11,12 @@ on: jobs: build: runs-on: ubuntu-latest + outputs: + id: git_tag_output steps: + - name: Get Git Tag + id: git_tag_step + run: echo "::set-output name=git_tag_output::$(git tag)" - name: Checkout uses: actions/checkout@v4 - name: Set up QEMU @@ -24,15 +29,13 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Get tag from git tag - run: echo "TAG=$(git tag)" >> $GITHUB_ENV - name: Build and push dockerhub uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ env.TAG }} + tags: onedatashare/transfer_service:${{ needs.build.outputs.git_tag_output }} push-to-ecr: needs: build @@ -53,7 +56,7 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ env.TAG }} + IMAGE_TAG: ${{ needs.build.outputs.git_tag_output }} run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From 35cc68430187abad9e35d86ee263efce3c78cb1a Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:27:24 -0400 Subject: [PATCH 079/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 26d3266b..b592fa74 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -14,11 +14,11 @@ jobs: outputs: id: git_tag_output steps: + - name: Checkout + uses: actions/checkout@v4 - name: Get Git Tag id: git_tag_step run: echo "::set-output name=git_tag_output::$(git tag)" - - name: Checkout - uses: actions/checkout@v4 - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From 6e4cb9f7600f8c4d9e4dc798a813a4dc98cdd3b3 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:28:29 -0400 Subject: [PATCH 080/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index b592fa74..7d3e0c38 100644 --- a/.github/workflows/docker-image.yml +++ 
b/.github/workflows/docker-image.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Get Git Tag id: git_tag_step - run: echo "::set-output name=git_tag_output::$(git tag)" + run: echo $(git tag) - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From 896da9e2c86d46e63ebcfc2b0904e821a7e2c51f Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:29:31 -0400 Subject: [PATCH 081/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 7d3e0c38..67732541 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Get Git Tag id: git_tag_step - run: echo $(git tag) + run: git tag - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From ac2077fc5a8fc214e2c76d849d9d489d54a11e7a Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:31:05 -0400 Subject: [PATCH 082/150] Publish to ECR --- .github/workflows/docker-image.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 67732541..9a25490c 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -16,9 +16,9 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Get Git Tag - id: git_tag_step + - name: Run git tag run: git tag + shell: bash - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From b7a376c26f9a24512f73d93a68db9f5c9d82fd91 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Sun, 10 Mar 2024 21:44:40 -0400 Subject: [PATCH 083/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 9a25490c..d46a841b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -17,7 +17,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Run git tag - run: git tag + run: echo $(git tag) shell: bash - name: Set up QEMU uses: docker/setup-qemu-action@v3 From d810c4902b70680cb746d7b190f4f788628e8f48 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 00:39:39 -0400 Subject: [PATCH 084/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index d46a841b..950c161b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -17,7 +17,7 @@ jobs: - name: Checkout uses: actions/checkout@v4 - name: Run git tag - run: echo $(git tag) + run: echo $(git) shell: bash - name: Set up QEMU uses: docker/setup-qemu-action@v3 From 00ed90c175c7d65c30e13fd1282e06f6d0441f89 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 00:40:43 -0400 Subject: [PATCH 085/150] Publish to ECR --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 950c161b..d46a841b 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -17,7 +17,7 @@ jobs: - name: Checkout 
uses: actions/checkout@v4 - name: Run git tag - run: echo $(git) + run: echo $(git tag) shell: bash - name: Set up QEMU uses: docker/setup-qemu-action@v3 From 4aedb97468dff43264a6182c6f93327f13e95a6f Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 00:43:56 -0400 Subject: [PATCH 086/150] Publish to ECR --- .github/workflows/docker-image.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index d46a841b..248a4bcd 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -16,9 +16,9 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Run git tag - run: echo $(git tag) - shell: bash + - name: Tag + run: git tag + shell: /usr/bin/bash {0} - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From dd92ac235aa0f527ddaf89e8317915aaf46cb79f Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 11:56:05 -0400 Subject: [PATCH 087/150] New changes --- .github/workflows/docker-image.yml | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 248a4bcd..1dfc97a5 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -16,9 +16,9 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Tag - run: git tag - shell: /usr/bin/bash {0} + - name: Set version + id: version + run: echo "VERSION=$(($(git describe --tags --abbrev=0 | sed 's/v//')+1))" >> $GITHUB_ENV - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -31,11 +31,13 @@ jobs: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - name: Build and push dockerhub uses: docker/build-push-action@v5 + env: + APP_VERSION: ${{ env.VERSION }} with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${{ needs.build.outputs.git_tag_output }} + tags: onedatashare/transfer_service:${APP_VERSION} push-to-ecr: needs: build @@ -56,8 +58,8 @@ jobs: env: ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} ECR_REPOSITORY: onedatashare/transfer_service - IMAGE_TAG: ${{ needs.build.outputs.git_tag_output }} + IMAGE_TAG: ${{ env.VERSION }} run: | - docker pull onedatashare/transfer_service:$IMAGE_TAG - docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker pull onedatashare/transfer_service:${IMAGE_TAG} + docker tag onedatashare/transfer_service:${IMAGE_TAG} $ECR_REGISTRY/$ECR_REPOSITORY:${IMAGE_TAG} + docker push $ECR_REGISTRY/$ECR_REPOSITORY:${IMAGE_TAG} From 670ccd591f6f205cc2d4ac8bbe125fd4180a0c36 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 11:56:56 -0400 Subject: [PATCH 088/150] New changes --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 1dfc97a5..7e61c693 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -37,7 +37,7 @@ jobs: context: . 
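The empty results from the `git tag` experiments above are expected: `actions/checkout@v4` defaults to a shallow fetch of just the triggering ref and does not fetch tag refs, so `git tag` prints nothing and the `git describe --tags` call in [PATCH 087/150] has nothing to describe. A sketch of a checkout that makes tag history available (step names illustrative):

    - name: Checkout with tags
      uses: actions/checkout@v4
      with:
        fetch-depth: 0     # full history
        fetch-tags: true   # include tag refs
    - name: Latest tag
      run: git describe --tags --abbrev=0

Even then, the `$(( ... + 1 ))` arithmetic in [PATCH 087/150] only works if the stripped tag is a bare integer; a tag like v1.2.3 would make the expansion fail.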
platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:${APP_VERSION} + tags: onedatashare/transfer_service:$APP_VERSION push-to-ecr: needs: build From e972a977414c2eed7da85cb3280a3104539324ed Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 11:57:58 -0400 Subject: [PATCH 089/150] New changes --- .github/workflows/docker-image.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 7e61c693..fc4299a7 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -30,14 +30,12 @@ jobs: run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - name: Build and push dockerhub - uses: docker/build-push-action@v5 - env: - APP_VERSION: ${{ env.VERSION }} + uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:$APP_VERSION + tags: onedatashare/transfer_service:${{ env.VERSION }} push-to-ecr: needs: build From 9dc2223a793bad63d780df0819b412330006d54b Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 13:25:54 -0400 Subject: [PATCH 090/150] New changes --- .github/workflows/docker-image.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index fc4299a7..cad0f318 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -18,7 +18,7 @@ jobs: uses: actions/checkout@v4 - name: Set version id: version - run: echo "VERSION=$(($(git describe --tags --abbrev=0 | sed 's/v//')+1))" >> $GITHUB_ENV + run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -58,6 +58,6 @@ jobs: ECR_REPOSITORY: onedatashare/transfer_service IMAGE_TAG: ${{ env.VERSION }} run: | - docker pull onedatashare/transfer_service:${IMAGE_TAG} - docker tag onedatashare/transfer_service:${IMAGE_TAG} $ECR_REGISTRY/$ECR_REPOSITORY:${IMAGE_TAG} - docker push $ECR_REGISTRY/$ECR_REPOSITORY:${IMAGE_TAG} + docker pull onedatashare/transfer_service:$IMAGE_TAG + docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG From bb5ec6abf77426731c85aa871e0818c107297044 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 13:37:08 -0400 Subject: [PATCH 091/150] New changes --- .github/workflows/docker-image.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index cad0f318..34a08f51 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -41,6 +41,9 @@ jobs: needs: build runs-on: ubuntu-latest steps: + - name: Set version + id: version + run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v3 with: From 4972002380771d141d5db3cf617ddfa3f8dc79e9 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 13:44:10 -0400 Subject: [PATCH 092/150] New changes --- .github/workflows/docker-image.yml | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 34a08f51..4d8fbba5 100644 --- 
a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -16,9 +16,17 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 + - name: Determine tag type + id: tag_type + run: echo "::set-output name=is_release_tag::$(echo ${GITHUB_REF#refs/tags/} | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo 'false')" - name: Set version id: version - run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + run: | + if [ "${{ steps.tag_type.outputs.is_release_tag }}" == "false" ]; then + echo "VERSION=v1.$GITHUB_RUN_NUMBER" >> $GITHUB_ENV + else + echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + fi - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -41,9 +49,17 @@ jobs: needs: build runs-on: ubuntu-latest steps: + - name: Determine tag type + id: tag_type + run: echo "::set-output name=is_release_tag::$(echo ${GITHUB_REF#refs/tags/} | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo 'false')" - name: Set version id: version - run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + run: | + if [ "${{ steps.tag_type.outputs.is_release_tag }}" == "false" ]; then + echo "VERSION=v1.$GITHUB_RUN_NUMBER" >> $GITHUB_ENV + else + echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + fi - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v3 with: @@ -63,4 +79,4 @@ jobs: run: | docker pull onedatashare/transfer_service:$IMAGE_TAG docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG - docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file From 05cedf3f39a59101eb77a87c60786b136a7e792e Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 11 Mar 2024 15:03:25 -0400 Subject: [PATCH 093/150] New changes --- .github/workflows/docker-image.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 4d8fbba5..bb9ae3f7 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,4 +1,4 @@ -name: Docker Image CI +name: Docker Image CI ( Dockerhub and ECR ) on: push: From 143e1127ac31b47b60b9ec285734956e098aa8a1 Mon Sep 17 00:00:00 2001 From: Venkata Phani Kumar Gottipati Date: Mon, 18 Mar 2024 11:17:25 -0400 Subject: [PATCH 094/150] Publish to ECR --- .github/workflows/docker-image.yml | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index bb9ae3f7..891908ac 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,12 +1,8 @@ -name: Docker Image CI ( Dockerhub and ECR ) +name: Docker Image Publish CI ( Dockerhub and ECR ) on: - push: - tags: - - '*' - branches: [ "master" ] - pull_request: - branches: [ "master" ] + release: + types: [created] jobs: build: @@ -22,11 +18,7 @@ jobs: - name: Set version id: version run: | - if [ "${{ steps.tag_type.outputs.is_release_tag }}" == "false" ]; then - echo "VERSION=v1.$GITHUB_RUN_NUMBER" >> $GITHUB_ENV - else echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - fi - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -55,17 +47,13 @@ jobs: - name: Set version id: version run: | - if [ "${{ steps.tag_type.outputs.is_release_tag }}" == "false" ]; then - echo "VERSION=v1.$GITHUB_RUN_NUMBER" >> $GITHUB_ENV - else echo "VERSION=${GITHUB_REF#refs/tags/}" 
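Patch 092 above duplicates the tag-type and version steps in both jobs for the same job-scoping reason. (The ::set-output workflow command it uses has since been deprecated by GitHub in favor of writing to $GITHUB_OUTPUT.) For reference, a minimal Java sketch of the same release-tag decision; the class and method names are illustrative only, not part of this patch set:

    import java.util.regex.Pattern;

    final class VersionResolver {
        // Mirrors the workflow logic: refs/tags/vX.Y.Z is a release tag,
        // anything else falls back to v1.<run number>.
        private static final Pattern RELEASE_TAG = Pattern.compile("^v[0-9]+\\.[0-9]+\\.[0-9]+$");

        static String resolve(String githubRef, long runNumber) {
            String tag = githubRef.replaceFirst("^refs/tags/", "");
            return RELEASE_TAG.matcher(tag).matches() ? tag : "v1." + runNumber;
        }
    }

    // resolve("refs/tags/v0.9.1", 57) -> "v0.9.1"
    // resolve("refs/heads/master", 57) -> "v1.57"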
>> $GITHUB_ENV - fi - name: Setup AWS ECR Details uses: aws-actions/configure-aws-credentials@v3 with: aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws-region: us-west-1 + aws-region: us-east-1 - name: Login to Amazon ECR id: login-pf-aws-ecr From 00bfe1a93ea4111c925393ba6fd607ed97e36942 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 2 Apr 2024 11:47:53 -0400 Subject: [PATCH 095/150] carbon rpc for pmeter is working --- .../odstransferservice/Enum/MessageType.java | 5 + .../config/RabbitMQConfig.java | 22 ++-- .../consumer/RabbitMQConsumer.java | 112 ++++++++++++++---- .../model/CarbonMeasureRequest.java | 10 ++ .../model/CarbonMeasureResponse.java | 9 ++ .../service/JobParamService.java | 4 +- .../service/PmeterParser.java | 46 ++++++- 7 files changed, 162 insertions(+), 46 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java new file mode 100644 index 00000000..5acb02fe --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java @@ -0,0 +1,5 @@ +package org.onedatashare.transferservice.odstransferservice.Enum; + +public enum MessageType { + CARBON_AVG_REQUEST, TRANSFER_JOB_REQUEST, APPLICATION_PARAM_CHANGE, CARBON_IP_REQUEST +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index be81ad00..63b3c18c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -1,13 +1,13 @@ package org.onedatashare.transferservice.odstransferservice.config; -import com.google.gson.*; -import org.springframework.amqp.core.*; +import org.springframework.amqp.core.Binding; +import org.springframework.amqp.core.BindingBuilder; +import org.springframework.amqp.core.DirectExchange; +import org.springframework.amqp.core.Queue; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import java.util.Date; -import java.util.Locale; @Configuration public class RabbitMQConfig { @@ -22,25 +22,17 @@ public class RabbitMQConfig { String routingKey; @Bean - public Gson gson() { - GsonBuilder builder = new GsonBuilder() - .registerTypeAdapter(Date.class, (JsonDeserializer) (json, typeOfT, context) -> new Date(json.getAsJsonPrimitive().getAsLong())); - return builder.create(); - } - - @Bean - Queue userQueue(){ - //String name, boolean durable, boolean exclusive, boolean autoDelete + Queue userQueue() { return new Queue(this.queueName, true, false, false); } @Bean - public DirectExchange exchange(){ + public DirectExchange exchange() { return new DirectExchange(exchange); } @Bean - public Binding binding(DirectExchange exchange, Queue userQueue){ + public Binding binding(DirectExchange exchange, Queue 
userQueue) { return BindingBuilder.bind(userQueue) .to(exchange) .with(routingKey); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index e70182a7..62538b4e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -2,36 +2,51 @@ import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureResponse; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.expanders.VfsExpander; +import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.core.Message; +import org.springframework.amqp.core.MessageBuilder; +import org.springframework.amqp.core.MessagePostProcessor; import org.springframework.amqp.core.Queue; import org.springframework.amqp.rabbit.annotation.RabbitListener; +import org.springframework.amqp.rabbit.core.RabbitTemplate; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.MediaType; import org.springframework.stereotype.Service; -import java.util.ArrayList; -import java.util.List; +import java.util.HashMap; +import java.util.Map; + +import static org.springframework.amqp.core.MessageProperties.CONTENT_TYPE_JSON; @Service public class RabbitMQConsumer { private final ObjectMapper objectMapper; private final ThreadPoolContract threadPool; + private final PmeterParser pmeterParser; Logger logger = LoggerFactory.getLogger(RabbitMQConsumer.class); + @Value("${spring.application.name}") + String applicationName; + JobControl jc; JobLauncher jobLauncher; @@ -40,8 +55,11 @@ public class RabbitMQConsumer { Queue userQueue; + @Autowired + RabbitTemplate rabbitTemplate; + - public RabbitMQConsumer(Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool) { + public RabbitMQConsumer(Queue userQueue, JobParamService jobParamService, JobLauncher 
asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool, PmeterParser pmeterParser) { this.userQueue = userQueue; this.jobParamService = jobParamService; this.jobLauncher = asyncJobLauncher; @@ -50,31 +68,77 @@ public RabbitMQConsumer(Queue userQueue, JobParamService jobParamService, JobLau this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); this.threadPool = threadPool; + this.pmeterParser = pmeterParser; } @RabbitListener(queues = "#{userQueue}") - public void consumeDefaultMessage(final Message message) { + public void consumeDefaultMessage(Message message) throws JsonProcessingException { String jsonStr = new String(message.getBody()); + MessageType messageType = MessageType.valueOf(message.getMessageProperties().getHeader("type")); + switch (messageType) { - logger.info("Message recv: {}", jsonStr); - try { - TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); - logger.info("Job Recieved: {}", request.toString()); + case TRANSFER_JOB_REQUEST: { + TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); + logger.info("Job Received: {}", request.toString()); + JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); + try { + jc.setRequest(request); + jobLauncher.run(jc.concurrentJobDefinition(), parameters); + return; + } catch (Exception e) { + logger.error(e.getMessage()); + } + } - JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); - jc.setRequest(request); - jobLauncher.run(jc.concurrentJobDefinition(), parameters); + case APPLICATION_PARAM_CHANGE: { + TransferApplicationParams params = objectMapper.readValue(jsonStr, TransferApplicationParams.class); + logger.info("Parsed TransferApplicationParams: {}", params); + this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); + } - return; - } catch (Exception e) { - logger.error("Failed to parse jsonStr: {} to TransferJobRequest.java", jsonStr); - } - try { - TransferApplicationParams params = objectMapper.readValue(jsonStr, TransferApplicationParams.class); - logger.info("Parsed TransferApplicationParams: {}", params); - this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); - } catch (Exception e) { - logger.error("Did not apply transfer params due to parsing message failure"); + case CARBON_AVG_REQUEST: { + CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); + logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); + CarbonScore sourceCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.sourceIp); + CarbonScore destCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.destinationIp); + double average = (double) (sourceCarbonScore.getAvgCarbon() + destCarbonScore.getAvgCarbon()) / 2; + CarbonMeasureResponse resp = new CarbonMeasureResponse(); + resp.transferNodeName = this.applicationName; + resp.averageCarbonIntensity = average; + logger.info("Response: CarbonMeasureResponse: {}", resp); + String jsonResp = this.objectMapper.writeValueAsString(resp); + MessagePostProcessor messagePostProcessor = this.embedMessageType(message.getMessageProperties().getCorrelationId()); + Message msg = MessageBuilder.withBody(jsonResp.getBytes()) + .setContentType(MediaType.APPLICATION_JSON_VALUE) + .build(); + 
this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor); + } + + case CARBON_IP_REQUEST: { + CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); + logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); + Map sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp); + Map destinationTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp); + Map mergedMap = new HashMap<>(); + mergedMap.putAll(sourceTraceRouteCarbon); + mergedMap.putAll(destinationTraceRouteCarbon); + String jsonResp = this.objectMapper.writeValueAsString(mergedMap); + MessagePostProcessor messagePostProcessor = this.embedMessageType(message.getMessageProperties().getCorrelationId()); + Message msg = MessageBuilder.withBody(jsonResp.getBytes()) + .setContentType(MediaType.APPLICATION_JSON_VALUE) + .build(); + this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor); + + } } } + + public MessagePostProcessor embedMessageType(String correlationId) { + return message -> { + message.getMessageProperties().setCorrelationId(correlationId); + message.getMessageProperties().setType(CONTENT_TYPE_JSON); + return message; + }; + } + } \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java new file mode 100644 index 00000000..dbf9f59e --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java @@ -0,0 +1,10 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +@Data +public class CarbonMeasureRequest { + public String transferNodeName; + public String sourceIp; + public String destinationIp; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java new file mode 100644 index 00000000..5f260990 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java @@ -0,0 +1,9 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +@Data +public class CarbonMeasureResponse { + public String transferNodeName; + public Double averageCarbonIntensity; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java index 3e3cb43c..ebde47dc 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java @@ -79,7 +79,7 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest String sourceIp = this.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType); builder.addString(SOURCE_HOST, sourceIp); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); - CarbonScore score = this.pmeterParser.runCarbonPmeter(sourceIp); + CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(sourceIp); 
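Since the consumer above dispatches on the "type" message header and replies to the request's replyTo queue using its correlationId, a caller has to set all three. A minimal client-side sketch with Spring AMQP; the exchange, routing key, reply-queue name, and the destination IP are placeholders, not values taken from this patch set:

    // Hypothetical caller; assumes an injected RabbitTemplate and ObjectMapper.
    void requestCarbonAverage(RabbitTemplate rabbitTemplate, ObjectMapper objectMapper) throws JsonProcessingException {
        CarbonMeasureRequest req = new CarbonMeasureRequest();
        req.sourceIp = "129.114.108.45";
        req.destinationIp = "129.114.108.46";       // placeholder
        Message rpc = MessageBuilder.withBody(objectMapper.writeValueAsBytes(req))
                .setContentType(MediaType.APPLICATION_JSON_VALUE)
                .setHeader("type", MessageType.CARBON_AVG_REQUEST.name())
                .setCorrelationId(UUID.randomUUID().toString())
                .setReplyTo("carbon.reply.queue")   // placeholder queue
                .build();
        rabbitTemplate.send("ods.exchange", "ods.routing.key", rpc);  // placeholder names
    }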
logger.info("Source Carbon Score: {}", score.avgCarbon); builder.addLong(CARBON_SCORE_SOURCE, (long) score.avgCarbon); } else if (request.getSource().getOauthSourceCredential() != null) { @@ -90,7 +90,7 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest String destIp = this.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType); builder.addString(DEST_HOST, destIp); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); - CarbonScore score = this.pmeterParser.runCarbonPmeter(destIp); + CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(destIp); logger.info("Destination Carbon Score: {}", score.avgCarbon); builder.addLong(CARBON_SCORE_DEST, (long)score.avgCarbon); } else if (request.getDestination().getOauthDestCredential() != null) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index a9c581a2..e33af620 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -19,7 +19,9 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; @Service public class PmeterParser { @@ -35,8 +37,11 @@ public class PmeterParser { @Value("${pmeter.carbon.path}") String pmeterCarbonPath; + @Value("${pmeter.carbon.map}") + String pmeterCarbonMapPath; + @Value("${pmeter.report.path}") - String pmeterReportPath; + String pmeterMetricsPath; @Value("${pmeter.interface}") String pmeterNic; @@ -61,7 +66,7 @@ public void init() { this.cmdLine = CommandLine.parse( String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", pmeterNic, odsUser, - measureCount, pmeterOptions, pmeterReportPath)); + measureCount, pmeterOptions, pmeterMetricsPath)); } public PmeterParser(ObjectMapper pmeterMapper) { @@ -87,7 +92,7 @@ public void runPmeter() { } public List parsePmeterOutput() throws IOException { - Path path = Paths.get(pmeterReportPath); + Path path = Paths.get(pmeterMetricsPath); List allLines = Files.readAllLines(path); List ret = new ArrayList<>(); for (String line : allLines) { @@ -99,14 +104,18 @@ public List parsePmeterOutput() throws IOException { return ret; } - public CarbonScore runCarbonPmeter(String ip) { + public CarbonScore carbonAverageTraceRoute(String ip) { //pmeter carbon 129.114.108.45 - if(this.toggle == false){return new CarbonScore();} + if (this.toggle == false) { + return new CarbonScore(); + } + if (ip == null || ip.isEmpty()) return new CarbonScore(); CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s", ip)); try { DefaultExecutor carbonExecutor = new DefaultExecutor(); carbonExecutor.execute(carbonCmd); } catch (IOException e) { + e.printStackTrace(); return new CarbonScore(); } try { @@ -124,4 +133,31 @@ public CarbonScore runCarbonPmeter(String ip) { return new CarbonScore(); } } + + public Map carbonPerIp(String ip) { + if (ip == null || ip.isEmpty()) return new HashMap<>(); + CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s --save_per_ip=True", ip)); + try { + DefaultExecutor carbonExecutor = new DefaultExecutor(); + carbonExecutor.execute(carbonCmd); + } catch (IOException e) { + 
e.printStackTrace();
+            return new HashMap<>();
+        }
+        try {
+            Path filePath = Paths.get(this.pmeterCarbonMapPath);
+            logger.info("Pmeter Carbon map file path: {}", filePath);
+            List lines = Files.readAllLines(filePath);
+            logger.info("CarbonMap lines: {}", lines);
+            String lastLine = lines.getLast();
+            HashMap measurement = this.pmeterMapper.readValue(lastLine, HashMap.class);
+            filePath.toFile().delete();
+            filePath.toFile().createNewFile();
+            logger.info("Carbon IP Map: {}", measurement);
+            return measurement;
+        } catch (IOException e) {
+            e.printStackTrace();
+        }
+        return new HashMap<>();
+    }
 }

From 3eaccb3b83072eee229e852d523360701fbb5e06 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Thu, 4 Apr 2024 13:19:28 -0400
Subject: [PATCH 096/150] fairly large refactor of job control, and implemented
 a clean approach for RPC messaging using polymorphism.

---
 .../config/BatchConfig.java | 17 ++-
 .../config/MetricsConfig.java | 21 ----
 .../config/RabbitMQConfig.java | 17 ++-
 .../consumer/RabbitMQConsumer.java | 116 ++++--------------
 .../controller/JobMonitor.java | 35 +-----
 .../controller/TransferController.java | 11 +-
 .../message/CarbonAvgRequestHandler.java | 63 ++++++++++
 .../message/CarbonIpRequestHandler.java | 55 +++++++++
 .../message/MessageHandler.java | 8 ++
 .../TransferApplicationParamHandler.java | 32 +++++
 .../message/TransferJobRequestHandler.java | 45 +++++++
 .../service/JobControl.java | 25 ++--
 .../service/PmeterParser.java | 8 +-
 .../listner/JobCompletionListener.java | 2 +-
 src/main/resources/application.properties | 2 +
 15 files changed, 280 insertions(+), 177 deletions(-)
 delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
index b27cd8ed..2b568308 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
@@ -1,27 +1,32 @@
 package org.onedatashare.transferservice.odstransferservice.config;

+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.springframework.batch.core.launch.JobLauncher;
 import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher;
 import org.springframework.batch.core.repository.JobRepository;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.core.task.SimpleAsyncTaskExecutor;
+import org.springframework.core.task.SyncTaskExecutor;
 import org.springframework.jdbc.datasource.DataSourceTransactionManager;
 import
org.springframework.retry.backoff.BackOffPolicy; import org.springframework.retry.backoff.ExponentialBackOffPolicy; import org.springframework.transaction.PlatformTransactionManager; import javax.sql.DataSource; -import java.util.HashSet; -import java.util.Set; import java.util.concurrent.TimeUnit; @Configuration public class BatchConfig { @Bean - public Set jobIds() { - return new HashSet<>(); + public ObjectMapper messageObjectMapper() { + ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); + objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); + return objectMapper; } @Bean @@ -30,10 +35,10 @@ public PlatformTransactionManager transactionManager(DataSource dataSource) { } @Bean - public JobLauncher asyncJobLauncher(JobRepository jobRepository) { + public JobLauncher jobLauncher(JobRepository jobRepository) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(jobRepository); - jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); + jobLauncher.setTaskExecutor(new SyncTaskExecutor()); return jobLauncher; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java deleted file mode 100644 index ebd324b5..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java +++ /dev/null @@ -1,21 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import lombok.Data; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -@Data -public class MetricsConfig { - @Bean - public ObjectMapper pmeterMapper() { - ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.registerModule(new JavaTimeModule()); - objectMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); - return objectMapper; - } - -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index 63b3c18c..5e30e1bc 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -1,9 +1,8 @@ package org.onedatashare.transferservice.odstransferservice.config; -import org.springframework.amqp.core.Binding; -import org.springframework.amqp.core.BindingBuilder; -import org.springframework.amqp.core.DirectExchange; -import org.springframework.amqp.core.Queue; +import org.springframework.amqp.core.*; +import org.springframework.amqp.rabbit.config.SimpleRabbitListenerContainerFactory; +import org.springframework.amqp.rabbit.connection.ConnectionFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -37,4 +36,14 @@ public Binding binding(DirectExchange exchange, Queue userQueue) { .to(exchange) .with(routingKey); } + + @Bean + public SimpleRabbitListenerContainerFactory 
rabbitListenerContainerFactory(ConnectionFactory connectionFactory) { + SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory(); + factory.setConnectionFactory(connectionFactory); + factory.setPrefetchCount(1); // Set prefetch count to 1 + factory.setAcknowledgeMode(AcknowledgeMode.MANUAL); // Use manual acknowledgment mode + return factory; + } + } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 62538b4e..386ff9ed 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -1,139 +1,73 @@ package org.onedatashare.transferservice.odstransferservice.consumer; -import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; +import com.rabbitmq.client.Channel; import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; -import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; -import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureResponse; -import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; -import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; -import org.onedatashare.transferservice.odstransferservice.service.JobControl; -import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.onedatashare.transferservice.odstransferservice.message.CarbonAvgRequestHandler; +import org.onedatashare.transferservice.odstransferservice.message.CarbonIpRequestHandler; +import org.onedatashare.transferservice.odstransferservice.message.TransferApplicationParamHandler; +import org.onedatashare.transferservice.odstransferservice.message.TransferJobRequestHandler; import org.springframework.amqp.core.Message; -import org.springframework.amqp.core.MessageBuilder; import org.springframework.amqp.core.MessagePostProcessor; import org.springframework.amqp.core.Queue; import org.springframework.amqp.rabbit.annotation.RabbitListener; import org.springframework.amqp.rabbit.core.RabbitTemplate; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.MediaType; +import org.springframework.amqp.support.AmqpHeaders; +import org.springframework.messaging.handler.annotation.Header; import org.springframework.stereotype.Service; -import java.util.HashMap; -import java.util.Map; +import java.io.IOException; import static org.springframework.amqp.core.MessageProperties.CONTENT_TYPE_JSON; @Service public class RabbitMQConsumer { - private final 
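/* The prefetch(1) + MANUAL ack settings above work together with the
   SyncTaskExecutor-backed JobLauncher in BatchConfig: JobLauncher.run()
   now blocks the listener thread until the transfer job finishes, and the
   broker will not deliver the next message until the in-flight delivery is
   acked in consumeDefaultMessage, so each transfer node drains the queue
   one job at a time. */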
ObjectMapper objectMapper;
-    private final ThreadPoolContract threadPool;
-    private final PmeterParser pmeterParser;
-    Logger logger = LoggerFactory.getLogger(RabbitMQConsumer.class);
+    private final TransferJobRequestHandler transferJobRequestHandler;
+    private final CarbonAvgRequestHandler carbonAvgRequestHandler;
+    private final TransferApplicationParamHandler transferApplicationParamHandler;

-    @Value("${spring.application.name}")
-    String applicationName;
-
-    JobControl jc;
-
-    JobLauncher jobLauncher;
-
-    JobParamService jobParamService;
+    private final CarbonIpRequestHandler carbonIpRequestHandler;

     Queue userQueue;

-    @Autowired
     RabbitTemplate rabbitTemplate;

-    public RabbitMQConsumer(Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolContract threadPool, PmeterParser pmeterParser) {
+    public RabbitMQConsumer(RabbitTemplate rabbitTemplate, Queue userQueue, TransferJobRequestHandler transferJobRequestHandler, CarbonAvgRequestHandler carbonAvgRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler, CarbonIpRequestHandler carbonIpRequestHandler) {
         this.userQueue = userQueue;
-        this.jobParamService = jobParamService;
-        this.jobLauncher = asyncJobLauncher;
-        this.jc = jc;
-        this.objectMapper = new ObjectMapper();
-        this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true);
-        this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS);
-        this.threadPool = threadPool;
-        this.pmeterParser = pmeterParser;
+        this.transferJobRequestHandler = transferJobRequestHandler;
+        this.carbonAvgRequestHandler = carbonAvgRequestHandler;
+        this.transferApplicationParamHandler = transferApplicationParamHandler;
+        this.carbonIpRequestHandler = carbonIpRequestHandler;
+        this.rabbitTemplate = rabbitTemplate;
     }

     @RabbitListener(queues = "#{userQueue}")
-    public void consumeDefaultMessage(Message message) throws JsonProcessingException {
-        String jsonStr = new String(message.getBody());
+    public void consumeDefaultMessage(Message message, Channel channel, @Header(AmqpHeaders.DELIVERY_TAG) long tag) throws IOException {
         MessageType messageType = MessageType.valueOf(message.getMessageProperties().getHeader("type"));
         switch (messageType) {
             case TRANSFER_JOB_REQUEST: {
-                TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class);
-                logger.info("Job Received: {}", request.toString());
-                JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request);
-                try {
-                    jc.setRequest(request);
-                    jobLauncher.run(jc.concurrentJobDefinition(), parameters);
-                    return;
-                } catch (Exception e) {
-                    logger.error(e.getMessage());
-                }
+                this.transferJobRequestHandler.messageHandler(message);
+                break;
             }

             case APPLICATION_PARAM_CHANGE: {
-                TransferApplicationParams params = objectMapper.readValue(jsonStr, TransferApplicationParams.class);
-                logger.info("Parsed TransferApplicationParams: {}", params);
-                this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism());
+                this.transferApplicationParamHandler.messageHandler(message);
+                break;
             }

             case CARBON_AVG_REQUEST: {
-                CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class);
-                logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest);
-                CarbonScore sourceCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.sourceIp);
-                CarbonScore destCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.destinationIp);
-                double average = (double) (sourceCarbonScore.getAvgCarbon() + destCarbonScore.getAvgCarbon()) / 2;
-                CarbonMeasureResponse resp = new CarbonMeasureResponse();
-                resp.transferNodeName = this.applicationName;
-                resp.averageCarbonIntensity = average;
-                logger.info("Response: CarbonMeasureResponse: {}", resp);
-                String jsonResp = this.objectMapper.writeValueAsString(resp);
-                MessagePostProcessor messagePostProcessor = this.embedMessageType(message.getMessageProperties().getCorrelationId());
-                Message msg = MessageBuilder.withBody(jsonResp.getBytes())
-                        .setContentType(MediaType.APPLICATION_JSON_VALUE)
-                        .build();
-                this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor);
+                this.carbonAvgRequestHandler.messageHandler(message);
+                break;
             }

             case CARBON_IP_REQUEST: {
-                CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class);
-                logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest);
-                Map sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp);
-                Map destinationTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp);
-                Map mergedMap = new HashMap<>();
-                mergedMap.putAll(sourceTraceRouteCarbon);
-                mergedMap.putAll(destinationTraceRouteCarbon);
-                String jsonResp = this.objectMapper.writeValueAsString(mergedMap);
-                MessagePostProcessor messagePostProcessor = this.embedMessageType(message.getMessageProperties().getCorrelationId());
-                Message msg = MessageBuilder.withBody(jsonResp.getBytes())
-                        .setContentType(MediaType.APPLICATION_JSON_VALUE)
-                        .build();
-                this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor);
-
+                this.carbonIpRequestHandler.messageHandler(message);
+                break;
             }
         }
+        channel.basicAck(tag, false);
     }

-    public MessagePostProcessor embedMessageType(String correlationId) {
+    public static MessagePostProcessor embedMessageType(String correlationId) {
         return message -> {
             message.getMessageProperties().setCorrelationId(correlationId);
             message.getMessageProperties().setType(CONTENT_TYPE_JSON);
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
index 3db70aff..031d58b1 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
@@ -1,11 +1,9 @@
 package org.onedatashare.transferservice.odstransferservice.controller;

-import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants;
 import org.onedatashare.transferservice.odstransferservice.model.BatchJobData;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.batch.core.JobExecution;
-import org.springframework.batch.core.JobParameters;
 import org.springframework.batch.core.explore.JobExplorer;
 import org.springframework.http.ResponseEntity;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -13,7 +11,7 @@
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;

-import java.util.*;
+import java.util.Optional;


 @RequestMapping("/api/v1/job")
@@ -21,13 +19,11 @@ public class JobMonitor {

     private final JobExplorer jobExplorer;
-    private Set jobIds;

     Logger logger = LoggerFactory.getLogger(JobMonitor.class);

-    public JobMonitor(JobExplorer
jobExplorer, Set jobIds) { + public JobMonitor(JobExplorer jobExplorer) { this.jobExplorer = jobExplorer; - this.jobIds = jobIds; } @GetMapping("/execution") @@ -43,31 +39,4 @@ public ResponseEntity getJobExecution(@RequestParam("jobId") Optio return ResponseEntity.ok(BatchJobData.builder().build()); } } - - @GetMapping("/ids") - public ResponseEntity> getJobIdsRun() { - logger.info("Listing Job Ids"); - return ResponseEntity.ok(new ArrayList<>(this.jobIds)); - } - -// @GetMapping("/uuid") -// public ResponseEntity> getJobExec(@RequestParam Optional> jobIds){ -// List jobUuids = new ArrayList<>(); -// if(jobIds.isPresent()){ -// for(Long jobId: jobIds.get()){ -// JobExecution jobExecution = this.jobExplorer.getJobExecution(jobId); -// JobParameters jobParameters = jobExecution.getJobParameters(); -// String jobUuid = jobParameters.getString(ODSConstants.JOB_UUID); -// jobUuids.add(UUID.fromString(jobUuid)); -// } -// }else{ -// for(Long jobId : this.jobIds){ -// JobExecution jobExecution = this.jobExplorer.getJobExecution(jobId); -// JobParameters jobParameters = jobExecution.getJobParameters(); -// String jobUuid = jobParameters.getString(ODSConstants.JOB_UUID); -// jobUuids.add(UUID.fromString(jobUuid)); -// } -// } -// return ResponseEntity.ok(jobUuids); -// } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java index d3ca4fd9..12b4c66f 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java @@ -30,13 +30,13 @@ public class TransferController { JobControl jc; - JobLauncher asyncJobLauncher; + JobLauncher jobLauncher; JobParamService jobParamService; - public TransferController(JobControl jobControl, JobLauncher asyncJobLauncher, JobParamService jobParamService) { + public TransferController(JobControl jobControl, JobLauncher jobLauncher, JobParamService jobParamService) { this.jc = jobControl; - this.asyncJobLauncher = asyncJobLauncher; + this.jobLauncher = jobLauncher; this.jobParamService = jobParamService; } @@ -46,9 +46,8 @@ public TransferController(JobControl jobControl, JobLauncher asyncJobLauncher, J public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { logger.info("Controller Entry point"); JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); - jc.setRequest(request); - Job job = jc.concurrentJobDefinition(); - JobExecution jobExecution = asyncJobLauncher.run(job, parameters); + Job job = jc.concurrentJobDefinition(request); + JobExecution jobExecution = jobLauncher.run(job, parameters); return ResponseEntity.status(HttpStatus.OK).body("Your batch job has been submitted with \n ID: " + jobExecution.getJobId()); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java new file mode 100644 index 00000000..a14c8765 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java @@ -0,0 +1,63 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.annotation.JsonInclude; +import 
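/* The execution endpoint retained in JobMonitor above is queried as
   GET /api/v1/job/execution?jobId=<id> and returns a serialized BatchJobData;
   the host and port depend on how the service is deployed. */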
com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationFeature; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureResponse; +import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; +import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.amqp.core.Message; +import org.springframework.amqp.core.MessageBuilder; +import org.springframework.amqp.core.MessagePostProcessor; +import org.springframework.amqp.rabbit.core.RabbitTemplate; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Service; + +@Service +public class CarbonAvgRequestHandler implements MessageHandler { + + private final ObjectMapper objectMapper; + private final PmeterParser pmeterParser; + private final RabbitTemplate rabbitTemplate; + + @Value("${spring.application.name}") + String applicationName; + + + Logger logger = LoggerFactory.getLogger(CarbonAvgRequestHandler.class); + + public CarbonAvgRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pmeterParser, RabbitTemplate rabbitTemplate) { + this.objectMapper = messageObjectMapper; + this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); + this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); + this.pmeterParser = pmeterParser; + this.rabbitTemplate = rabbitTemplate; + } + + @Override + public void messageHandler(Message message) throws JsonProcessingException { + String jsonStr = new String(message.getBody()); + CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); + logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); + CarbonScore sourceCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.sourceIp); + CarbonScore destCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.destinationIp); + double average = (double) (sourceCarbonScore.getAvgCarbon() + destCarbonScore.getAvgCarbon()) / 2; + CarbonMeasureResponse resp = new CarbonMeasureResponse(); + resp.transferNodeName = this.applicationName; + resp.averageCarbonIntensity = average; + logger.info("Response: CarbonMeasureResponse: {}", resp); + String jsonResp = this.objectMapper.writeValueAsString(resp); + MessagePostProcessor messagePostProcessor = RabbitMQConsumer.embedMessageType(message.getMessageProperties().getCorrelationId()); + Message msg = MessageBuilder.withBody(jsonResp.getBytes()) + .setContentType(MediaType.APPLICATION_JSON_VALUE) + .build(); + this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor); + + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java new file mode 100644 index 00000000..2310ac0a --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java @@ -0,0 +1,55 @@ +package 
org.onedatashare.transferservice.odstransferservice.message;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer;
+import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest;
+import org.onedatashare.transferservice.odstransferservice.service.PmeterParser;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.amqp.core.Message;
+import org.springframework.amqp.core.MessageBuilder;
+import org.springframework.amqp.core.MessagePostProcessor;
+import org.springframework.amqp.rabbit.core.RabbitTemplate;
+import org.springframework.http.MediaType;
+import org.springframework.stereotype.Service;
+
+import java.util.HashMap;
+import java.util.Map;
+
+@Service
+public class CarbonIpRequestHandler implements MessageHandler {
+
+    private final ObjectMapper objectMapper;
+    private final PmeterParser pmeterParser;
+    private final RabbitTemplate rabbitTemplate;
+
+    Logger logger = LoggerFactory.getLogger(CarbonIpRequestHandler.class);
+
+    public CarbonIpRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pmeterParser, RabbitTemplate rabbitTemplate) {
+        this.objectMapper = messageObjectMapper;
+        this.pmeterParser = pmeterParser;
+        this.rabbitTemplate = rabbitTemplate;
+
+    }
+
+    @Override
+    public void messageHandler(Message message) throws JsonProcessingException {
+        String jsonStr = new String(message.getBody());
+        CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class);
+        logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest);
+        Map sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp);
+        Map destinationTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp);
+        Map mergedMap = new HashMap<>();
+        mergedMap.putAll(sourceTraceRouteCarbon);
+        mergedMap.putAll(destinationTraceRouteCarbon);
+        String jsonResp = this.objectMapper.writeValueAsString(mergedMap);
+        MessagePostProcessor messagePostProcessor = RabbitMQConsumer.embedMessageType(message.getMessageProperties().getCorrelationId());
+        Message msg = MessageBuilder.withBody(jsonResp.getBytes())
+                .setContentType(MediaType.APPLICATION_JSON_VALUE)
+                .build();
+        logger.info("Sending reply to: {}", message.getMessageProperties().getReplyTo());
+        this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor);
+
+    }
+}
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java
new file mode 100644
index 00000000..e29d021c
--- /dev/null
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java
@@ -0,0 +1,8 @@
+package org.onedatashare.transferservice.odstransferservice.message;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.springframework.amqp.core.Message;
+
+public interface MessageHandler {
+    void messageHandler(Message message) throws JsonProcessingException;
+}
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java
new file mode 100644
index 00000000..27b6b266
--- /dev/null
+++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -0,0 +1,32 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.amqp.core.Message; +import org.springframework.stereotype.Service; + +@Service +public class TransferApplicationParamHandler implements MessageHandler{ + + private final ObjectMapper mesageObjectMapper; + private final ThreadPoolContract threadPool; + Logger logger = LoggerFactory.getLogger(TransferApplicationParamHandler.class); + + public TransferApplicationParamHandler(ObjectMapper messageObjectMapper, ThreadPoolContract threadPool){ + this.mesageObjectMapper = messageObjectMapper; + this.threadPool = threadPool; + } + + @Override + public void messageHandler(Message message) throws JsonProcessingException { + String jsonStr = new String(message.getBody()); + TransferApplicationParams params = mesageObjectMapper.readValue(jsonStr, TransferApplicationParams.class); + logger.info("Parsed TransferApplicationParams: {}", params); + this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); + + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java new file mode 100644 index 00000000..6cd6560b --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -0,0 +1,45 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; +import org.onedatashare.transferservice.odstransferservice.service.JobParamService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.amqp.core.Message; +import org.springframework.batch.core.JobParameters; +import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.stereotype.Service; + +@Service +public class TransferJobRequestHandler implements MessageHandler { + + private final ObjectMapper objectMapper; + private final JobParamService jobParamService; + private final JobLauncher jobLauncher; + private final JobControl jobControl; + + Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); + + public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher jobLauncher, JobControl jobControl) { + this.objectMapper = messageObjectMapper; + this.jobParamService = jobParamService; + this.jobLauncher = jobLauncher; + this.jobControl = jobControl; + } + + @Override + public void messageHandler(Message message) throws JsonProcessingException { + String jsonStr = new String(message.getBody()); + TransferJobRequest request = objectMapper.readValue(jsonStr, 
TransferJobRequest.class); + logger.info("Job Received: {}", request.toString()); + JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); + try { + jobLauncher.run(jobControl.concurrentJobDefinition(request), parameters); + } catch (Exception e) { + logger.error(e.getMessage()); + } + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index ae91699a..4d17a936 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -35,8 +35,6 @@ @Setter public class JobControl { - public TransferJobRequest request; - Logger logger = LoggerFactory.getLogger(JobControl.class); @Autowired @@ -66,8 +64,9 @@ public class JobControl { @Autowired BackOffPolicy backOffPolicy; - private List createConcurrentFlow(String basePath) { - List fileInfo = expanderFactory.getExpander(this.request.getSource()); + private List createConcurrentFlow(TransferJobRequest request) { + String basePath = request.getSource().getFileSourcePath(); + List fileInfo = expanderFactory.getExpander(request.getSource()); return fileInfo.stream().map(file -> { String idForStep = ""; if (!file.getId().isEmpty()) { @@ -76,11 +75,11 @@ private List createConcurrentFlow(String basePath) { idForStep = file.getPath(); } SimpleStepBuilder stepBuilder = new StepBuilder(idForStep, this.jobRepository) - .chunk(this.request.getOptions().getPipeSize(), this.platformTransactionManager); + .chunk(request.getOptions().getPipeSize(), this.platformTransactionManager); stepBuilder - .reader(readerWriterFactory.getRightReader(this.request.getSource(), file, this.request.getOptions())) + .reader(readerWriterFactory.getRightReader(request.getSource(), file, request.getOptions())) .writer(readerWriterFactory.getRightWriter(request.getDestination(), file)); - if (this.request.getOptions().getParallelThreadCount() > 0) { + if (request.getOptions().getParallelThreadCount() > 0) { stepBuilder.taskExecutor(threadPool.parallelPool(request.getOptions().getParallelThreadCount(), file.getPath())); } stepBuilder.throttleLimit(64); @@ -91,14 +90,14 @@ private List createConcurrentFlow(String basePath) { }).collect(Collectors.toList()); } - public Job concurrentJobDefinition() { - JobBuilder jobBuilder = new JobBuilder(this.request.getJobUuid().toString(), this.jobRepository); - connectionBag.preparePools(this.request); - List flows = createConcurrentFlow(request.getSource().getFileSourcePath()); - this.influxIOService.reconfigureBucketForNewJob(this.request.getOwnerId()); + public Job concurrentJobDefinition(TransferJobRequest request) { + JobBuilder jobBuilder = new JobBuilder(request.getJobUuid().toString(), this.jobRepository); + connectionBag.preparePools(request); + List flows = createConcurrentFlow(request); + this.influxIOService.reconfigureBucketForNewJob(request.getOwnerId()); Flow[] fl = new Flow[flows.size()]; Flow f = new FlowBuilder("splitFlow") - .split(this.threadPool.stepPool(this.request.getOptions().getConcurrencyThreadCount())) + .split(this.threadPool.stepPool(request.getOptions().getConcurrencyThreadCount())) .add(flows.toArray(fl)) .build(); return jobBuilder diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java 
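The MessageHandler interface above makes the RPC surface additive: a new message type needs an enum constant, a handler bean, and one more case in the consumer's switch. A hypothetical sketch, where HEARTBEAT_REQUEST and HeartbeatRequestHandler are invented for illustration and are not part of this patch set (imports as in the handlers above):

    @Service
    public class HeartbeatRequestHandler implements MessageHandler {

        private final ObjectMapper objectMapper;

        public HeartbeatRequestHandler(ObjectMapper messageObjectMapper) {
            this.objectMapper = messageObjectMapper;
        }

        @Override
        public void messageHandler(Message message) throws JsonProcessingException {
            // Deserialize the body with objectMapper, do the work, and if the
            // caller expects an answer, reply via the message's replyTo and
            // correlationId, as the carbon handlers above do.
        }
    }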
b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index e33af620..e13a6a55 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -1,6 +1,8 @@ package org.onedatashare.transferservice.odstransferservice.service; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import jakarta.annotation.PostConstruct; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; @@ -69,7 +71,7 @@ public void init() { measureCount, pmeterOptions, pmeterMetricsPath)); } - public PmeterParser(ObjectMapper pmeterMapper) { + public PmeterParser() { this.outputStream = new ByteArrayOutputStream(); this.streamHandler = new PumpStreamHandler(outputStream); @@ -78,7 +80,9 @@ public PmeterParser(ObjectMapper pmeterMapper) { pmeterExecutor.setWatchdog(watchDog); pmeterExecutor.setStreamHandler(streamHandler); - this.pmeterMapper = pmeterMapper; + this.pmeterMapper = new ObjectMapper(); + this.pmeterMapper.registerModule(new JavaTimeModule()); + this.pmeterMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java index f6060826..76f22ce6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java @@ -5,8 +5,8 @@ import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; -import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 451301b0..02d43d8c 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -32,6 +32,7 @@ spring.rabbitmq.port=${AMPQ_PORT:5672} spring.rabbitmq.username=${AMPQ_USER:guest} spring.rabbitmq.password=${AMPQ_PWD:guest} spring.rabbitmq.listener.direct.prefetch=1 +spring.rabbitmq.connection-timeout=20000 #optimizer optimizer.url=${OPTIMIZER_URL:http://localhost:8088} @@ -44,6 +45,7 @@ pmeter.measure=${PMETER_MEASURE:1} pmeter.options=${PMETER_CLI_OPTIONS:"-KNS"} pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} +pmeter.carbon.map=${PMETER_CARBON_MAP:${HOME}/.pmeter/carbon_ip_map.json} pmeter.carbon.toggle=${REPORT_CARBON:false} #influx From a009fdda72a818adafb7b762782143563848b052 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Thu, 4 Apr 2024 15:19:46 -0400 Subject: [PATCH 097/150] removed the 
nasty map so ip's stayed ordered formally, first entry is the first hop we measure --- .../message/CarbonIpRequestHandler.java | 15 ++--- .../message/MessageHandler.java | 4 +- .../TransferApplicationParamHandler.java | 4 +- .../message/TransferJobRequestHandler.java | 4 +- .../model/CarbonIpEntry.java | 11 ++++ .../service/PmeterParser.java | 55 ++++++++++--------- 6 files changed, 56 insertions(+), 37 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java index 2310ac0a..87e80c17 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java @@ -3,6 +3,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; import org.slf4j.Logger; @@ -14,7 +15,9 @@ import org.springframework.http.MediaType; import org.springframework.stereotype.Service; +import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; @Service @@ -34,16 +37,14 @@ public CarbonIpRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pme } @Override - public void messageHandler(Message message) throws JsonProcessingException { + public void messageHandler(Message message) throws IOException { String jsonStr = new String(message.getBody()); CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); - Map sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp); - Map destinationTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp); - Map mergedMap = new HashMap<>(); - mergedMap.putAll(sourceTraceRouteCarbon); - mergedMap.putAll(destinationTraceRouteCarbon); - String jsonResp = this.objectMapper.writeValueAsString(mergedMap); + List sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp); + List destinationTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp); + sourceTraceRouteCarbon.addAll(destinationTraceRouteCarbon); + String jsonResp = this.objectMapper.writeValueAsString(sourceTraceRouteCarbon); MessagePostProcessor messagePostProcessor = RabbitMQConsumer.embedMessageType(message.getMessageProperties().getCorrelationId()); Message msg = MessageBuilder.withBody(jsonResp.getBytes()) .setContentType(MediaType.APPLICATION_JSON_VALUE) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java index e29d021c..210bc944 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java +++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java @@ -3,6 +3,8 @@ import com.fasterxml.jackson.core.JsonProcessingException; import org.springframework.amqp.core.Message; +import java.io.IOException; + public interface MessageHandler { - void messageHandler(Message message) throws JsonProcessingException; + void messageHandler(Message message) throws IOException; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java index 27b6b266..60a918cd 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -9,6 +9,8 @@ import org.springframework.amqp.core.Message; import org.springframework.stereotype.Service; +import java.io.IOException; + @Service public class TransferApplicationParamHandler implements MessageHandler{ @@ -22,7 +24,7 @@ public TransferApplicationParamHandler(ObjectMapper messageObjectMapper, ThreadP } @Override - public void messageHandler(Message message) throws JsonProcessingException { + public void messageHandler(Message message) throws IOException { String jsonStr = new String(message.getBody()); TransferApplicationParams params = mesageObjectMapper.readValue(jsonStr, TransferApplicationParams.class); logger.info("Parsed TransferApplicationParams: {}", params); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index 6cd6560b..b2c6f242 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -13,6 +13,8 @@ import org.springframework.batch.core.launch.JobLauncher; import org.springframework.stereotype.Service; +import java.io.IOException; + @Service public class TransferJobRequestHandler implements MessageHandler { @@ -31,7 +33,7 @@ public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamServi } @Override - public void messageHandler(Message message) throws JsonProcessingException { + public void messageHandler(Message message) throws IOException { String jsonStr = new String(message.getBody()); TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); logger.info("Job Received: {}", request.toString()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java new file mode 100644 index 00000000..5595f6d0 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java @@ -0,0 +1,11 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +@Data +public class CarbonIpEntry { + String ip; + int carbonIntensity; + double lat; + double lon; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 
e13a6a55..13f90fc3 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -1,5 +1,6 @@ package org.onedatashare.transferservice.odstransferservice.service; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; @@ -8,6 +9,7 @@ import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.PumpStreamHandler; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.slf4j.Logger; @@ -20,10 +22,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; @Service public class PmeterParser { @@ -138,30 +137,32 @@ public CarbonScore carbonAverageTraceRoute(String ip) { } } - public Map carbonPerIp(String ip) { - if (ip == null || ip.isEmpty()) return new HashMap<>(); + public List carbonPerIp(String ip) throws IOException{ + if (ip == null || ip.isEmpty()) return new ArrayList<>(); CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s --save_per_ip=True", ip)); - try { - DefaultExecutor carbonExecutor = new DefaultExecutor(); - carbonExecutor.execute(carbonCmd); - } catch (IOException e) { - e.printStackTrace(); - return new HashMap<>(); - } - try { - Path filePath = Paths.get(this.pmeterCarbonMapPath); - logger.info("Pmeter Carbon map file path: {}", filePath); - List lines = Files.readAllLines(filePath); - logger.info("CarbonMap lines: {}", lines); - String lastLine = lines.getLast(); - HashMap measurement = this.pmeterMapper.readValue(lastLine, HashMap.class); - filePath.toFile().delete(); - filePath.toFile().createNewFile(); - logger.info("Carbon IP Map: {}", measurement); - return measurement; - } catch (IOException e) { - e.printStackTrace(); + DefaultExecutor carbonExecutor = new DefaultExecutor(); + carbonExecutor.execute(carbonCmd); + Path filePath = Paths.get(this.pmeterCarbonMapPath); + logger.info("Pmeter Carbon map file path: {}", filePath); + List lines = Files.readAllLines(filePath); + logger.info("CarbonMap lines: {}", lines); + String lastLine = lines.getLast(); + + Map measurement = this.pmeterMapper.readValue(lastLine, new TypeReference>() {}); + List retList = new ArrayList<>(); + for(Map.Entry entry: measurement.entrySet()){ + if(entry.getKey().equals("time")) continue;; + LinkedHashMap value = (LinkedHashMap) entry.getValue(); + CarbonIpEntry carbonIpEntry = new CarbonIpEntry(); + carbonIpEntry.setIp(entry.getKey()); + carbonIpEntry.setCarbonIntensity((int) value.get("carbon_intensity")); + carbonIpEntry.setLon((double) value.get("lon")); + carbonIpEntry.setLat((double) value.get("lat")); + retList.add(carbonIpEntry); } - return new HashMap<>(); + filePath.toFile().delete(); + filePath.toFile().createNewFile(); + logger.info("Carbon IP List: {}", retList); + return retList; } } From d897c48c4d6aab1535a5d03697898445aa68d568 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 8 Apr 2024 17:41:04 -0400 Subject: [PATCH 
098/150] should be more or less done need to test on chameleon --- .../config/RabbitMQConfig.java | 2 +- .../consumer/RabbitMQConsumer.java | 43 ++++++++++--------- .../message/CarbonAvgRequestHandler.java | 5 ++- .../message/TransferJobRequestHandler.java | 1 - .../service/PmeterParser.java | 35 ++++++--------- src/main/resources/application.properties | 1 - 6 files changed, 39 insertions(+), 48 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index 5e30e1bc..45a9ff62 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -42,7 +42,7 @@ public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(Conne SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory(); factory.setConnectionFactory(connectionFactory); factory.setPrefetchCount(1); // Set prefetch count to 1 - factory.setAcknowledgeMode(AcknowledgeMode.MANUAL); // Use manual acknowledgment mode + factory.setAcknowledgeMode(AcknowledgeMode.AUTO); return factory; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 386ff9ed..8055e29d 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -1,8 +1,6 @@ package org.onedatashare.transferservice.odstransferservice.consumer; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.rabbitmq.client.Channel; import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; import org.onedatashare.transferservice.odstransferservice.message.CarbonAvgRequestHandler; import org.onedatashare.transferservice.odstransferservice.message.CarbonIpRequestHandler; @@ -13,8 +11,6 @@ import org.springframework.amqp.core.Queue; import org.springframework.amqp.rabbit.annotation.RabbitListener; import org.springframework.amqp.rabbit.core.RabbitTemplate; -import org.springframework.amqp.support.AmqpHeaders; -import org.springframework.messaging.handler.annotation.Header; import org.springframework.stereotype.Service; import java.io.IOException; @@ -45,26 +41,31 @@ public RabbitMQConsumer(RabbitTemplate rabbitTemplate, Queue userQueue, Transfer } @RabbitListener(queues = "#{userQueue}") - public void consumeDefaultMessage(Message message, Channel channel, @Header(AmqpHeaders.DELIVERY_TAG) long tag) throws IOException { + public void consumeDefaultMessage(Message message) { MessageType messageType = MessageType.valueOf(message.getMessageProperties().getHeader("type")); - switch (messageType) { - case TRANSFER_JOB_REQUEST: { - this.transferJobRequestHandler.messageHandler(message); - } - - case APPLICATION_PARAM_CHANGE: { - this.transferApplicationParamHandler.messageHandler(message); - } - - case CARBON_AVG_REQUEST: { - this.carbonAvgRequestHandler.messageHandler(message); - } - - case CARBON_IP_REQUEST: { - this.carbonIpRequestHandler.messageHandler(message); + try { + switch (messageType) { + case TRANSFER_JOB_REQUEST: { + this.transferJobRequestHandler.messageHandler(message); + } + + case 
APPLICATION_PARAM_CHANGE: { + this.transferApplicationParamHandler.messageHandler(message); + } + + case CARBON_AVG_REQUEST: { + this.carbonAvgRequestHandler.messageHandler(message); + } + + case CARBON_IP_REQUEST: { + this.carbonIpRequestHandler.messageHandler(message); + } } + } catch (IOException e) { + e.printStackTrace(); } - channel.basicAck(tag, false); + +// channel.basicAck(tag, false); } public static MessagePostProcessor embedMessageType(String correlationId) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java index a14c8765..d0f5aa2a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java @@ -1,7 +1,6 @@ package org.onedatashare.transferservice.odstransferservice.message; import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer; @@ -19,6 +18,8 @@ import org.springframework.http.MediaType; import org.springframework.stereotype.Service; +import java.io.IOException; + @Service public class CarbonAvgRequestHandler implements MessageHandler { @@ -41,7 +42,7 @@ public CarbonAvgRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pm } @Override - public void messageHandler(Message message) throws JsonProcessingException { + public void messageHandler(Message message) throws IOException { String jsonStr = new String(message.getBody()); CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index b2c6f242..cd57e1f8 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -1,6 +1,5 @@ package org.onedatashare.transferservice.odstransferservice.message; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 13f90fc3..f7c5c40e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -22,7 +22,10 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.*; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; @Service 
public class PmeterParser { @@ -64,10 +67,7 @@ public class PmeterParser { @PostConstruct public void init() { - this.cmdLine = CommandLine.parse( - String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", - pmeterNic, odsUser, - measureCount, pmeterOptions, pmeterMetricsPath)); + this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); } public PmeterParser() { @@ -109,19 +109,12 @@ public List parsePmeterOutput() throws IOException { public CarbonScore carbonAverageTraceRoute(String ip) { //pmeter carbon 129.114.108.45 - if (this.toggle == false) { - return new CarbonScore(); - } - if (ip == null || ip.isEmpty()) return new CarbonScore(); + if (this.toggle == false || ip == null || ip.isEmpty()) return new CarbonScore(); + CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s", ip)); try { DefaultExecutor carbonExecutor = new DefaultExecutor(); carbonExecutor.execute(carbonCmd); - } catch (IOException e) { - e.printStackTrace(); - return new CarbonScore(); - } - try { Path filePath = Paths.get(this.pmeterCarbonPath); List lines = Files.readAllLines(filePath); CarbonScore score = new CarbonScore(); @@ -133,25 +126,24 @@ public CarbonScore carbonAverageTraceRoute(String ip) { filePath.toFile().createNewFile(); return score; } catch (IOException e) { + e.printStackTrace(); return new CarbonScore(); } } - public List carbonPerIp(String ip) throws IOException{ + public List carbonPerIp(String ip) throws IOException { if (ip == null || ip.isEmpty()) return new ArrayList<>(); CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s --save_per_ip=True", ip)); DefaultExecutor carbonExecutor = new DefaultExecutor(); carbonExecutor.execute(carbonCmd); Path filePath = Paths.get(this.pmeterCarbonMapPath); - logger.info("Pmeter Carbon map file path: {}", filePath); List lines = Files.readAllLines(filePath); - logger.info("CarbonMap lines: {}", lines); String lastLine = lines.getLast(); - - Map measurement = this.pmeterMapper.readValue(lastLine, new TypeReference>() {}); + Map measurement = this.pmeterMapper.readValue(lastLine, new TypeReference>() { + }); List retList = new ArrayList<>(); - for(Map.Entry entry: measurement.entrySet()){ - if(entry.getKey().equals("time")) continue;; + for (Map.Entry entry : measurement.entrySet()) { + if (entry.getKey().equals("time")) continue; LinkedHashMap value = (LinkedHashMap) entry.getValue(); CarbonIpEntry carbonIpEntry = new CarbonIpEntry(); carbonIpEntry.setIp(entry.getKey()); @@ -162,7 +154,6 @@ public List carbonPerIp(String ip) throws IOException{ } filePath.toFile().delete(); filePath.toFile().createNewFile(); - logger.info("Carbon IP List: {}", retList); return retList; } } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 02d43d8c..d6e22cdf 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -31,7 +31,6 @@ spring.rabbitmq.addresses=${AMPQ_ADDRESS} spring.rabbitmq.port=${AMPQ_PORT:5672} spring.rabbitmq.username=${AMPQ_USER:guest} spring.rabbitmq.password=${AMPQ_PWD:guest} -spring.rabbitmq.listener.direct.prefetch=1 spring.rabbitmq.connection-timeout=20000 #optimizer From 8c4e4d1e68b91cbf8630b23f250b3787f4e44282 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 8 Apr 2024 23:25:44 -0400 Subject: [PATCH 099/150] Fixed the binding key to be correct now 
--- .../odstransferservice/config/RabbitMQConfig.java | 5 +---- src/main/resources/application.properties | 1 - 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index 45a9ff62..212a4731 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -17,9 +17,6 @@ public class RabbitMQConfig { @Value("${ods.rabbitmq.exchange}") String exchange; - @Value("${ods.rabbitmq.routingkey}") - String routingKey; - @Bean Queue userQueue() { return new Queue(this.queueName, true, false, false); @@ -34,7 +31,7 @@ public DirectExchange exchange() { public Binding binding(DirectExchange exchange, Queue userQueue) { return BindingBuilder.bind(userQueue) .to(exchange) - .with(routingKey); + .with(queueName); } @Bean diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index d6e22cdf..d2d28c29 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -26,7 +26,6 @@ ods.rabbitmq.exchange=ods.exchange #for vfs nodes this should be the APP_NAME which is always lowercase. ods.rabbitmq.queue=${CONNECTOR_QUEUE:transferQueue} -ods.rabbitmq.routingkey=ods.routing spring.rabbitmq.addresses=${AMPQ_ADDRESS} spring.rabbitmq.port=${AMPQ_PORT:5672} spring.rabbitmq.username=${AMPQ_USER:guest} From c6a35151a0eea7470500ab44cd9e3ef9ab635af8 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 9 Apr 2024 12:16:11 -0400 Subject: [PATCH 100/150] Added breaks so messages dont cascade forgot --- .../odstransferservice/consumer/RabbitMQConsumer.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 8055e29d..13b7ad6d 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -47,18 +47,22 @@ public void consumeDefaultMessage(Message message) { switch (messageType) { case TRANSFER_JOB_REQUEST: { this.transferJobRequestHandler.messageHandler(message); + break; } case APPLICATION_PARAM_CHANGE: { this.transferApplicationParamHandler.messageHandler(message); + break; } case CARBON_AVG_REQUEST: { this.carbonAvgRequestHandler.messageHandler(message); + break; } case CARBON_IP_REQUEST: { this.carbonIpRequestHandler.messageHandler(message); + break; } } } catch (IOException e) { From a8f27c16125d22b3440168c8e4145a938b9a37cf Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 9 Apr 2024 12:55:25 -0400 Subject: [PATCH 101/150] Small changes --- .../config/BatchConfig.java | 1 - .../config/RabbitMQConfig.java | 15 +++++++------ .../message/TransferJobRequestHandler.java | 11 +++++++++- .../model/TransferJobRequest.java | 21 ++++++++++++------- .../service/JobControl.java | 8 +------ 5 files changed, 31 insertions(+), 25 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 
2b568308..75dbdcfb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -8,7 +8,6 @@ import org.springframework.batch.core.repository.JobRepository; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.core.task.SyncTaskExecutor; import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.retry.backoff.BackOffPolicy; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index 212a4731..7819b79f 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -34,13 +34,12 @@ public Binding binding(DirectExchange exchange, Queue userQueue) { .with(queueName); } - @Bean - public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory) { - SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory(); - factory.setConnectionFactory(connectionFactory); - factory.setPrefetchCount(1); // Set prefetch count to 1 - factory.setAcknowledgeMode(AcknowledgeMode.AUTO); - return factory; - } +// @Bean +// public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory) { +// SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory(); +// factory.setConnectionFactory(connectionFactory); +// factory.setPrefetchCount(1); // Set prefetch count to 1 +// return factory; +// } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index cd57e1f8..039c0ce9 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -1,18 +1,22 @@ package org.onedatashare.transferservice.odstransferservice.message; import com.fasterxml.jackson.databind.ObjectMapper; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.onedatashare.transferservice.odstransferservice.service.JobParamService; +import org.onedatashare.transferservice.odstransferservice.service.expanders.ExpanderFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.amqp.core.Message; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.io.IOException; +import java.util.List; @Service public class TransferJobRequestHandler implements MessageHandler { @@ -21,14 +25,17 @@ public class 
TransferJobRequestHandler implements MessageHandler { private final JobParamService jobParamService; private final JobLauncher jobLauncher; private final JobControl jobControl; + private final ExpanderFactory expanderFactory; + Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); - public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher jobLauncher, JobControl jobControl) { + public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher jobLauncher, JobControl jobControl, ExpanderFactory expanderFactory) { this.objectMapper = messageObjectMapper; this.jobParamService = jobParamService; this.jobLauncher = jobLauncher; this.jobControl = jobControl; + this.expanderFactory = expanderFactory; } @Override @@ -36,6 +43,8 @@ public void messageHandler(Message message) throws IOException { String jsonStr = new String(message.getBody()); TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); logger.info("Job Received: {}", request.toString()); + List fileInfo = expanderFactory.getExpander(request.getSource()); + request.getSource().setInfoList(fileInfo); JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); try { jobLauncher.run(jobControl.concurrentJobDefinition(request), parameters); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java index 0064299e..2c2b6ffb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java @@ -6,8 +6,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; - -import java.util.ArrayList; +import java.util.List; import java.util.UUID; @Data @@ -15,10 +14,13 @@ @NoArgsConstructor public class TransferJobRequest { - @NonNull private String ownerId; + @NonNull + private String ownerId; private int connectionBufferSize; - @NonNull private Source source; - @NonNull private Destination destination; + @NonNull + private Source source; + @NonNull + private Destination destination; private TransferOptions options; @JsonInclude(JsonInclude.Include.NON_NULL) private UUID jobUuid; @@ -28,7 +30,8 @@ public class TransferJobRequest { @AllArgsConstructor @NoArgsConstructor public static class Destination { - @NonNull private EndpointType type; + @NonNull + private EndpointType type; String credId; private AccountEndpointCredential vfsDestCredential; private OAuthEndpointCredential oauthDestCredential; @@ -39,11 +42,13 @@ public static class Destination { @AllArgsConstructor @NoArgsConstructor public static class Source { - @NonNull private EndpointType type; + @NonNull + private EndpointType type; String credId; private AccountEndpointCredential vfsSourceCredential; private OAuthEndpointCredential oauthSourceCredential; private String fileSourcePath; - @NonNull private ArrayList infoList; + @NonNull + private List infoList; } } \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index 4d17a936..1e30dc90 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -4,11 +4,9 @@ import lombok.NoArgsConstructor; import lombok.Setter; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.expanders.ExpanderFactory; import org.onedatashare.transferservice.odstransferservice.service.listner.JobCompletionListener; import org.onedatashare.transferservice.odstransferservice.service.step.ReaderWriterFactory; import org.slf4j.Logger; @@ -37,9 +35,6 @@ public class JobControl { Logger logger = LoggerFactory.getLogger(JobControl.class); - @Autowired - ExpanderFactory expanderFactory; - @Autowired ReaderWriterFactory readerWriterFactory; @@ -66,8 +61,7 @@ public class JobControl { private List createConcurrentFlow(TransferJobRequest request) { String basePath = request.getSource().getFileSourcePath(); - List fileInfo = expanderFactory.getExpander(request.getSource()); - return fileInfo.stream().map(file -> { + return request.getSource().getInfoList().stream().map(file -> { String idForStep = ""; if (!file.getId().isEmpty()) { idForStep = file.getId(); From e9d476743cdbff0d731767b38324608fd38927e3 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 9 Apr 2024 13:04:07 -0400 Subject: [PATCH 102/150] Less print lines --- .../odstransferservice/service/PmeterParser.java | 2 +- .../odstransferservice/service/expanders/HttpExpander.java | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index f7c5c40e..9000d483 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -126,7 +126,7 @@ public CarbonScore carbonAverageTraceRoute(String ip) { filePath.toFile().createNewFile(); return score; } catch (IOException e) { - e.printStackTrace(); + logger.error("Error Carbon Average Trace Route:\n {}", e.getMessage()); return new CarbonScore(); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java index e3953d13..11dcaf83 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java @@ -57,7 +57,6 @@ public List expandedFileSystem(List userSelectedResource Document doc = Jsoup.connect(this.credential.getUri() + basePath + selectedFiles.getPath()) .ignoreContentType(true) .get(); - logger.info(doc.toString()); Elements links = doc.select("body a"); for (Element 
elem : links) { if (elem.text().endsWith("/")) { //directory to expand @@ -77,7 +76,7 @@ public List expandedFileSystem(List userSelectedResource if (directory.text().contains("..") || directory.text().contains(".")) { continue; } - logger.info(directory.baseUri() + directory.text()); + logger.info("File: {}", directory.baseUri() + directory.text()); Document doc = Jsoup.connect(directory.baseUri() + basePath +directory.text()).get(); Elements links = doc.select("body a"); for (Element elem : links) { From fe71e5f7133bba357cdac1a2ddfda5814198a937 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 9 Apr 2024 13:59:35 -0400 Subject: [PATCH 103/150] yup --- .../odstransferservice/pools/ThreadPoolManagerPlatform.java | 1 + .../transferservice/odstransferservice/service/JobControl.java | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java index 086ec0a8..18ff73ca 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -26,6 +26,7 @@ public ThreadPoolTaskExecutor createExecutor(int threadCount, String prefix) { ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); executor.setAllowCoreThreadTimeOut(false); executor.setCorePoolSize(threadCount); + executor.setPrestartAllCoreThreads(true); executor.setThreadNamePrefix(prefix); executor.initialize(); if (this.platformThreadMap == null) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index 1e30dc90..c2c1433e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -76,7 +76,6 @@ private List createConcurrentFlow(TransferJobRequest request) { if (request.getOptions().getParallelThreadCount() > 0) { stepBuilder.taskExecutor(threadPool.parallelPool(request.getOptions().getParallelThreadCount(), file.getPath())); } - stepBuilder.throttleLimit(64); stepBuilder.faultTolerant() .backOffPolicy(this.backOffPolicy); return new FlowBuilder(basePath + idForStep) From 7863728302bf97ded3fae415214aede6f355e37a Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 10 Apr 2024 00:27:58 -0400 Subject: [PATCH 104/150] Changed the job launcher to support multiple jobs, and removed some commented code --- .../odstransferservice/config/BatchConfig.java | 5 +++-- .../odstransferservice/config/RabbitMQConfig.java | 15 ++++----------- 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 75dbdcfb..570ff12d 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -8,7 +8,7 @@ import org.springframework.batch.core.repository.JobRepository; import org.springframework.context.annotation.Bean; import 
org.springframework.context.annotation.Configuration; -import org.springframework.core.task.SyncTaskExecutor; +import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.jdbc.datasource.DataSourceTransactionManager; import org.springframework.retry.backoff.BackOffPolicy; import org.springframework.retry.backoff.ExponentialBackOffPolicy; @@ -20,6 +20,7 @@ @Configuration public class BatchConfig { + @Bean public ObjectMapper messageObjectMapper() { ObjectMapper objectMapper = new ObjectMapper(); @@ -37,7 +38,7 @@ public PlatformTransactionManager transactionManager(DataSource dataSource) { public JobLauncher jobLauncher(JobRepository jobRepository) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(jobRepository); - jobLauncher.setTaskExecutor(new SyncTaskExecutor()); + jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); return jobLauncher; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java index 7819b79f..5fb680b1 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java @@ -1,8 +1,9 @@ package org.onedatashare.transferservice.odstransferservice.config; -import org.springframework.amqp.core.*; -import org.springframework.amqp.rabbit.config.SimpleRabbitListenerContainerFactory; -import org.springframework.amqp.rabbit.connection.ConnectionFactory; +import org.springframework.amqp.core.Binding; +import org.springframework.amqp.core.BindingBuilder; +import org.springframework.amqp.core.DirectExchange; +import org.springframework.amqp.core.Queue; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -34,12 +35,4 @@ public Binding binding(DirectExchange exchange, Queue userQueue) { .with(queueName); } -// @Bean -// public SimpleRabbitListenerContainerFactory rabbitListenerContainerFactory(ConnectionFactory connectionFactory) { -// SimpleRabbitListenerContainerFactory factory = new SimpleRabbitListenerContainerFactory(); -// factory.setConnectionFactory(connectionFactory); -// factory.setPrefetchCount(1); // Set prefetch count to 1 -// return factory; -// } - } From 10dc188766a97453b7b9ad430f9325939059d815 Mon Sep 17 00:00:00 2001 From: likhithakotla Date: Tue, 14 May 2024 23:56:39 -0400 Subject: [PATCH 105/150] Added DropBoxWriterSmallFile --- parallel.txt | 0 .../model/DropBoxSmallFileUpload.java | 40 +++++++++++ .../service/step/ReaderWriterFactory.java | 13 ++-- .../service/step/box/BoxWriterSmallFile.java | 11 +-- .../step/dropbox/DropBoxWriterLargeFile.java | 5 ++ .../step/dropbox/DropBoxWriterSmallFile.java | 71 +++++++++++++++++++ 6 files changed, 131 insertions(+), 9 deletions(-) create mode 100644 parallel.txt create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java diff --git a/parallel.txt b/parallel.txt new file mode 100644 index 00000000..e69de29b diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java new file mode 100644 index 00000000..c6a7d94b --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java @@ -0,0 +1,40 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.List; +import java.util.PriorityQueue; + +import lombok.Getter; +import lombok.Setter; + +/** + * This class is used to Buffer all of the data from a small file. So any file less than 20MB according to the box api + */ +@Getter +@Setter +public class DropBoxSmallFileUpload { + private PriorityQueue dataChunkPriorityQueue; + + public DropBoxSmallFileUpload(){ + this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); + } + + public void addAllChunks(List chunks){ + this.dataChunkPriorityQueue.addAll(chunks); + } + + public InputStream condenseListToOneStream(){ + int totalLength = this.dataChunkPriorityQueue.stream().mapToInt(byteArray -> byteArray.getData().length).sum(); + byte[] combinedBytes = new byte[totalLength]; + + int currentIndex = 0; + for (DataChunk chunk : dataChunkPriorityQueue) { + byte[] byteArray = chunk.getData(); + System.arraycopy(byteArray, 0, combinedBytes, currentIndex, byteArray.length); + currentIndex += byteArray.length; + } + + return new ByteArrayInputStream(combinedBytes); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java index 2b32f7a1..04b6cac2 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java @@ -1,5 +1,7 @@ package org.onedatashare.transferservice.odstransferservice.service.step; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.*; + import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; @@ -15,6 +17,7 @@ import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterSmallFile; import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxChunkedWriter; import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxReader; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxWriterSmallFile; import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPReader; import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPWriter; import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveReader; @@ -31,9 +34,6 @@ import org.springframework.batch.item.ItemWriter; import org.springframework.stereotype.Service; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; - @Service public class ReaderWriterFactory { @@ -118,7 +118,12 @@ 
public ItemWriter getRightWriter(TransferJobRequest.Destination desti return boxWriterLargeFile; } case dropbox: - return new DropBoxChunkedWriter(destination.getOauthDestCredential(), this.metricsCollector, this.influxCache); + final long DROPBOX_SINGLE_UPLOAD_LIMIT = 150L * 1024L * 1024L; + if (fileInfo.getSize() < DROPBOX_SINGLE_UPLOAD_LIMIT){ + return new DropBoxWriterSmallFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + }else { + return new DropBoxChunkedWriter(destination.getOauthDestCredential(), this.metricsCollector, this.influxCache); + } case scp: SCPWriter scpWriter = new SCPWriter(fileInfo, this.metricsCollector, this.influxCache); scpWriter.setPool(connectionBag.getSftpWriterPool()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java index 9a6d7707..69520634 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java @@ -1,7 +1,9 @@ package org.onedatashare.transferservice.odstransferservice.service.step.box; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.*; + +import java.util.List; -import com.box.sdk.BoxAPIConnection; -import com.box.sdk.BoxFolder; import org.onedatashare.transferservice.odstransferservice.model.BoxSmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; @@ -18,9 +20,8 @@ import org.springframework.batch.item.Chunk; import org.springframework.batch.item.ItemWriter; -import java.util.List; - -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.DEST_BASE_PATH; +import com.box.sdk.BoxAPIConnection; +import com.box.sdk.BoxFolder; public class BoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java new file mode 100644 index 00000000..92f2ad63 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java @@ -0,0 +1,5 @@ +package org.onedatashare.transferservice.odstransferservice.service.step.dropbox; + +public class DropBoxWriterLargeFile { + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java new file mode 100644 index 00000000..21b36561 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java @@ -0,0 +1,71 @@ +package org.onedatashare.transferservice.odstransferservice.service.step.dropbox; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.*; + +import java.io.InputStream; +import java.util.List; + +import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import 
org.onedatashare.transferservice.odstransferservice.model.DropBoxSmallFileUpload; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.annotation.AfterStep; +import org.springframework.batch.core.annotation.BeforeStep; +import org.springframework.batch.item.Chunk; +import org.springframework.batch.item.ItemWriter; + +import com.dropbox.core.DbxRequestConfig; +import com.dropbox.core.v2.DbxClientV2; +import com.dropbox.core.v2.files.WriteMode; + +public class DropBoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { + + EntityInfo fileInfo; + String destinationBasePath; + DropBoxSmallFileUpload smallFileUpload; + DbxClientV2 dropboxClient; + Logger logger = LoggerFactory.getLogger(DropBoxWriterSmallFile.class); + private String fileName; + + public DropBoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { + super(metricsCollector, influxCache); + DbxRequestConfig config = DbxRequestConfig.newBuilder("dropbox/java-tutorial").build(); + this.dropboxClient = new DbxClientV2(config, credential.getToken()); + this.fileInfo = fileInfo; + smallFileUpload = new DropBoxSmallFileUpload(); + } + @BeforeStep + public void beforeStep(StepExecution stepExecution) { + this.destinationBasePath = stepExecution.getJobParameters().getString(DEST_BASE_PATH); + this.stepExecution = stepExecution; + } + + @AfterStep + public ExitStatus afterStep(StepExecution stepExecution) throws Exception { + try (InputStream inputStream = this.smallFileUpload.condenseListToOneStream()) { + dropboxClient.files().uploadBuilder(destinationBasePath + "/" + fileName) + .withMode(WriteMode.ADD) + .uploadAndFinish(inputStream); + } catch (Exception e) { + logger.error("Error uploading file to Dropbox: ", e); + return ExitStatus.FAILED; + } + return stepExecution.getExitStatus(); + } + + + @Override + public void write(Chunk chunk) throws Exception { + List items = chunk.getItems(); + this.fileName = items.get(0).getFileName(); + this.smallFileUpload.addAllChunks(items); + logger.info("Small file Dropbox writer wrote {} DataChunks", items.size()); + } +} \ No newline at end of file From c782c99f5325a4f27cb4fafd74e43b93d5c8e63b Mon Sep 17 00:00:00 2001 From: likhithakotla <157924680+likhithakotla@users.noreply.github.com> Date: Thu, 16 May 2024 03:03:17 -0400 Subject: [PATCH 106/150] Delete parallel.txt --- parallel.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 parallel.txt diff --git a/parallel.txt b/parallel.txt deleted file mode 100644 index e69de29b..00000000 From a85554d1a0cf3933ec42ccbb46d96d565a9d16f0 Mon Sep 17 00:00:00 2001 From: likhithakotla <157924680+likhithakotla@users.noreply.github.com> Date: Thu, 16 May 2024 03:03:58 -0400 Subject: [PATCH 107/150] Delete src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java --- .../service/step/dropbox/DropBoxWriterLargeFile.java 
| 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java deleted file mode 100644 index 92f2ad63..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterLargeFile.java +++ /dev/null @@ -1,5 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step.dropbox; - -public class DropBoxWriterLargeFile { - -} From a88ba95a9fd88701cc338717ddd045d3b6a53382 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 24 May 2024 14:33:13 -0400 Subject: [PATCH 108/150] Refactored classes that were duplicate for small file upload --- .../model/AWSSinglePutRequestMetaData.java | 54 ------------------- .../model/BoxSmallFileUpload.java | 41 -------------- ...llFileUpload.java => SmallFileUpload.java} | 14 +++-- .../AmazonS3/AmazonS3SmallFileWriter.java | 12 ++--- .../service/step/box/BoxWriterSmallFile.java | 6 +-- .../step/dropbox/DropBoxWriterSmallFile.java | 26 ++++----- 6 files changed, 28 insertions(+), 125 deletions(-) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java rename src/main/java/org/onedatashare/transferservice/odstransferservice/model/{DropBoxSmallFileUpload.java => SmallFileUpload.java} (84%) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java deleted file mode 100644 index bc361574..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Getter; -import lombok.Setter; -import lombok.SneakyThrows; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.math.BigInteger; -import java.nio.ByteBuffer; -import java.security.MessageDigest; -import java.util.*; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.stream.Collectors; - -@Getter -@Setter -public class AWSSinglePutRequestMetaData { - private Queue dataChunkPriorityQueue; - Logger logger = LoggerFactory.getLogger(AWSSinglePutRequestMetaData.class); - - public AWSSinglePutRequestMetaData(){ - this.dataChunkPriorityQueue = new ConcurrentLinkedQueue(); - } - public void addChunk(DataChunk chunk){ - this.dataChunkPriorityQueue.add(chunk); - } - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - @SneakyThrows - public InputStream condenseListToOneStream(long size){ - MessageDigest md = MessageDigest.getInstance("SHA-256"); - byte[] data = new byte[Long.valueOf(size).intValue()]; - ByteBuffer buffer = ByteBuffer.wrap(data); - List list = this.dataChunkPriorityQueue.stream().sorted(new DataChunkComparator()).collect(Collectors.toList()); - for(DataChunk currentChunk : list){ - logger.info("Processing chunk {}", currentChunk); - 
buffer.put(currentChunk.getData()); - md.update(currentChunk.getData()); - } - String output = String.format("%032X", new BigInteger(1, md.digest())); - logger.info(String.valueOf(output)); - this.dataChunkPriorityQueue.clear(); - return new ByteArrayInputStream(buffer.array()); - } - - public void clear(){ - this.dataChunkPriorityQueue.clear(); - } -} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java deleted file mode 100644 index 0d328ced..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Getter; -import lombok.Setter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.PriorityQueue; - -/** - * This class is used to Buffer all of the data from a small file. So any file less than 20MB according to the box api - */ -@Getter -@Setter -public class BoxSmallFileUpload { - private PriorityQueue dataChunkPriorityQueue; - - public BoxSmallFileUpload(){ - this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); - } - - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - public InputStream condenseListToOneStream(){ - int totalLength = this.dataChunkPriorityQueue.stream().mapToInt(byteArray -> byteArray.getData().length).sum(); - byte[] combinedBytes = new byte[totalLength]; - - int currentIndex = 0; - for (DataChunk chunk : dataChunkPriorityQueue) { - byte[] byteArray = chunk.getData(); - System.arraycopy(byteArray, 0, combinedBytes, currentIndex, byteArray.length); - currentIndex += byteArray.length; - } - - return new ByteArrayInputStream(combinedBytes); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java similarity index 84% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java index c6a7d94b..41b3f893 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/DropBoxSmallFileUpload.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java @@ -1,22 +1,20 @@ package org.onedatashare.transferservice.odstransferservice.model; +import lombok.Getter; +import lombok.Setter; + import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.List; import java.util.PriorityQueue; -import lombok.Getter; -import lombok.Setter; - -/** - * This class is used to Buffer all of the data from a small file. 
So any file less than 20MB according to the box api - */ @Getter @Setter -public class DropBoxSmallFileUpload { +public class SmallFileUpload { + private PriorityQueue dataChunkPriorityQueue; - public DropBoxSmallFileUpload(){ + public SmallFileUpload(){ this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java index f50130a6..ecc25ad0 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java @@ -5,9 +5,9 @@ import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectResult; import lombok.Setter; -import org.onedatashare.transferservice.odstransferservice.model.AWSSinglePutRequestMetaData; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; @@ -34,7 +34,7 @@ public class AmazonS3SmallFileWriter extends ODSBaseWriter implements ItemWriter private final AccountEndpointCredential destCredential; Logger logger = LoggerFactory.getLogger(AmazonS3SmallFileWriter.class); private String destBasepath; - private AWSSinglePutRequestMetaData putObjectRequest; + private SmallFileUpload smallFileUpload; private AmazonS3 client; @Setter private S3ConnectionPool pool; @@ -46,7 +46,7 @@ public AmazonS3SmallFileWriter(AccountEndpointCredential destCredential, EntityI this.fileName = fileInfo.getId(); this.fileInfo = fileInfo; this.destCredential = destCredential; - this.putObjectRequest = new AWSSinglePutRequestMetaData(); + this.smallFileUpload = new SmallFileUpload(); String[] temp = this.destCredential.getUri().split(":::"); this.bucketName = temp[1]; } @@ -62,11 +62,11 @@ public void beforeStep(StepExecution stepExecution) throws InterruptedException @AfterStep public ExitStatus afterStep(StepExecution stepExecution) { - PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucketName, Paths.get(this.destBasepath, fileName).toString(), this.putObjectRequest.condenseListToOneStream(this.fileInfo.getSize()), makeMetaDataForSinglePutRequest(this.fileInfo.getSize())); + PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucketName, Paths.get(this.destBasepath, fileName).toString(), this.smallFileUpload.condenseListToOneStream(), makeMetaDataForSinglePutRequest(this.fileInfo.getSize())); PutObjectResult result = client.putObject(putObjectRequest); logger.info("Pushed the final chunk of the small file"); logger.info(result.toString()); - this.putObjectRequest.clear(); + this.smallFileUpload.getDataChunkPriorityQueue().clear(); this.pool.returnObject(this.client); return stepExecution.getExitStatus(); } @@ -81,6 +81,6 @@ public ObjectMetadata makeMetaDataForSinglePutRequest(long size) { public void write(Chunk chunk) throws Exception { List items = 
chunk.getItems(); this.fileName = items.get(0).getFileName(); - this.putObjectRequest.addAllChunks(items); + this.smallFileUpload.addAllChunks(items); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java index 69520634..5e69a6d4 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java @@ -4,9 +4,9 @@ import java.util.List; -import org.onedatashare.transferservice.odstransferservice.model.BoxSmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; @@ -28,7 +28,7 @@ public class BoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { EntityInfo fileInfo; String destinationBasePath; - DropBoxSmallFileUpload smallFileUpload; + SmallFileUpload smallFileUpload; DbxClientV2 dropboxClient; Logger logger = LoggerFactory.getLogger(DropBoxWriterSmallFile.class); private String fileName; @@ -39,11 +38,12 @@ public DropBoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fil DbxRequestConfig config = DbxRequestConfig.newBuilder("dropbox/java-tutorial").build(); this.dropboxClient = new DbxClientV2(config, credential.getToken()); this.fileInfo = fileInfo; - smallFileUpload = new DropBoxSmallFileUpload(); + smallFileUpload = new SmallFileUpload(); } + @BeforeStep public void beforeStep(StepExecution stepExecution) { - this.destinationBasePath = stepExecution.getJobParameters().getString(DEST_BASE_PATH); + this.destinationBasePath = stepExecution.getJobParameters().getString(DEST_BASE_PATH); this.stepExecution = stepExecution; } @@ -59,7 +59,7 @@ public ExitStatus afterStep(StepExecution stepExecution) throws Exception { } return stepExecution.getExitStatus(); } - + @Override public void write(Chunk chunk) throws Exception { From 5704c8b6f8d6b54882984b2b8cbaaf72a6b610d4 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 24 May 2024 14:34:01 -0400 Subject: [PATCH 109/150] changing the import --- .../odstransferservice/service/step/ReaderWriterFactory.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java index 04b6cac2..55fe8e70 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java @@ -1,6 +1,5 @@ package org.onedatashare.transferservice.odstransferservice.service.step; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.*; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import 
org.onedatashare.transferservice.odstransferservice.model.EntityInfo; @@ -34,6 +33,9 @@ import org.springframework.batch.item.ItemWriter; import org.springframework.stereotype.Service; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; + @Service public class ReaderWriterFactory { From a0ef77ac93729bff215c1e5699234274fc74165d Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 24 May 2024 14:36:36 -0400 Subject: [PATCH 110/150] fixing the config name --- .../service/step/dropbox/DropBoxWriterSmallFile.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java index cccada0e..18b1e051 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java @@ -10,6 +10,7 @@ import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; @@ -35,8 +36,7 @@ public class DropBoxWriterSmallFile extends ODSBaseWriter implements ItemWriter< public DropBoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { super(metricsCollector, influxCache); - DbxRequestConfig config = DbxRequestConfig.newBuilder("dropbox/java-tutorial").build(); - this.dropboxClient = new DbxClientV2(config, credential.getToken()); + this.dropboxClient = new DbxClientV2(ODSUtility.dbxRequestConfig, credential.getToken()); this.fileInfo = fileInfo; smallFileUpload = new SmallFileUpload(); } From 7bbd1f1ddac2121564ff981bf01ec0147f30b0ba Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 5 Jun 2024 13:11:08 -0400 Subject: [PATCH 111/150] Now using Vault to fully discover configuration from Vault, need to test and see if the aws part is working fully --- pom.xml | 11 ++++++++++- .../odstransferservice/utility/ODSUtility.java | 5 +++-- src/main/resources/application-hsql.properties | 8 -------- src/main/resources/application.properties | 12 ++++++------ src/main/resources/bootstrap-aws.yml | 14 ++++++++++++++ src/main/resources/bootstrap-local.yml | 13 +++++++++++++ 6 files changed, 46 insertions(+), 17 deletions(-) create mode 100644 src/main/resources/bootstrap-aws.yml create mode 100644 src/main/resources/bootstrap-local.yml diff --git a/pom.xml b/pom.xml index 5b795391..2aa3185a 100644 --- a/pom.xml +++ b/pom.xml @@ -16,7 +16,7 @@ 21 - 2023.0.0-RC1 + 2023.0.2 @@ -42,6 +42,15 @@ com.fasterxml.jackson.datatype jackson-datatype-jsr310 + + org.springframework.cloud + spring-cloud-starter-vault-config + + + org.springframework.cloud + spring-cloud-starter-bootstrap + 4.1.3 + com.influxdb influxdb-client-java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java index 48295873..803158f5 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java @@ -11,7 +11,6 @@ import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; -import org.springframework.beans.factory.annotation.Value; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -23,8 +22,10 @@ public class ODSUtility { private static String odsClientID = "OneDataShare-DIDCLab"; - private static String gDriveClientId= System.getenv("ODS_GDRIVE_CLIENT_ID"); +// @Value("${gdrive.client.id}") + private static String gDriveClientId = System.getenv("ODS_GDRIVE_CLIENT_ID"); +// @Value("${gdrive.client.secret}") private static String gDriveClientSecret = System.getenv("ODS_GDRIVE_CLIENT_SECRET"); // @Value("${gdrive.appname}") diff --git a/src/main/resources/application-hsql.properties b/src/main/resources/application-hsql.properties index c48263a9..ced5e134 100644 --- a/src/main/resources/application-hsql.properties +++ b/src/main/resources/application-hsql.properties @@ -1,11 +1,3 @@ -#spring.datasource.driver-class-name=org.hsqldb.jdbc.JDBCDriver -##spring.datasource.url=jdbc:hsqldb:mem:testdb;DB_CLOSE_DELAY=-1; -#spring.datasource.url= -#spring.datasource.username=SA -#spring.datasource.password= -#spring.jpa.hibernate.ddl-auto=create -#spring.batch.jdbc.initialize-schema=always - spring.datasource.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc;DB_CLOSE_DELAY=-1 spring.datasource.username=sa spring.datasource.password= diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index d2d28c29..4859e83a 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,9 +1,11 @@ -spring.application.name=${APP_NAME:ODS-Transfer-Node} +spring.application.name=${APP_NAME:TransferService} ods.user=${USER_NAME} - server.port=8092 - spring.main.allow-bean-definition-overriding=true + +gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} +gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} + #Eureka config eureka.client.enabled=true eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka @@ -17,13 +19,11 @@ management.endpoints.web.exposure.include=* #Ignore non-null values spring.jackson.default-property-inclusion=NON_NULL - spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:10000} spring.batch.job.enabled=false #RabitMQ ods.rabbitmq.exchange=ods.exchange - #for vfs nodes this should be the APP_NAME which is always lowercase. 
ods.rabbitmq.queue=${CONNECTOR_QUEUE:transferQueue} spring.rabbitmq.addresses=${AMPQ_ADDRESS} @@ -54,4 +54,4 @@ ods.influx.org=${INFLUX_ORG:OneDataShare} transfer.service.concurrency=${MAX_CONCURRENCY:32} transfer.service.parallelism=${MAX_PARALLELISM:32} -transfer.service.pipelining=${MAX_PIPELINING:32} \ No newline at end of file +transfer.service.pipelining=${MAX_PIPELINING:32} diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-aws.yml new file mode 100644 index 00000000..64eb6f2e --- /dev/null +++ b/src/main/resources/bootstrap-aws.yml @@ -0,0 +1,14 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + authentication: aws_iam + aws-iam: + role: aws-role + kv: + enabled: true + profile-separator: '/' + default-context: TransferService + backend: secrets \ No newline at end of file diff --git a/src/main/resources/bootstrap-local.yml b/src/main/resources/bootstrap-local.yml new file mode 100644 index 00000000..8b4a94d7 --- /dev/null +++ b/src/main/resources/bootstrap-local.yml @@ -0,0 +1,13 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + authentication: TOKEN + token: ${VAULT_ODS_TOKEN} + kv: + enabled: true + profile-separator: '/' + default-context: TransferService + backend: secrets \ No newline at end of file From 66347febce81c11a77ba0c90a6a49502213ab85d Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 5 Jun 2024 13:16:07 -0400 Subject: [PATCH 112/150] forgot to include the aws java sdk --- pom.xml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pom.xml b/pom.xml index 2aa3185a..aa8b571e 100644 --- a/pom.xml +++ b/pom.xml @@ -172,6 +172,10 @@ aws-java-sdk-s3 + com.amazonaws + aws-java-sdk + 1.12.736 + jakarta.servlet jakarta.servlet-api 5.0.0 From 196429ed630e2c81148d26be4937483dc2e3564c Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 5 Jun 2024 13:23:00 -0400 Subject: [PATCH 113/150] corrected the maven dependency for aws --- pom.xml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index aa8b571e..72463b39 100644 --- a/pom.xml +++ b/pom.xml @@ -173,9 +173,10 @@ com.amazonaws - aws-java-sdk + aws-java-sdk-core 1.12.736 - + + jakarta.servlet jakarta.servlet-api 5.0.0 From 2cc0424c636c6d4e141e056467e3dfea7c9bbcdc Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 5 Jun 2024 13:25:52 -0400 Subject: [PATCH 114/150] including region --- src/main/resources/bootstrap-aws.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-aws.yml index 64eb6f2e..1232df78 100644 --- a/src/main/resources/bootstrap-aws.yml +++ b/src/main/resources/bootstrap-aws.yml @@ -7,6 +7,7 @@ spring: authentication: aws_iam aws-iam: role: aws-role + region: us-east-2 kv: enabled: true profile-separator: '/' From de2cbc76f2f6248567129fc8c91a712ffaf106a9 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 7 Jun 2024 16:42:35 -0400 Subject: [PATCH 115/150] using the wrong aws java sdk, using v2 now. 
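
For context: Spring Cloud Vault's aws_iam authentication signs an STS GetCallerIdentity request with the node's AWS credentials, and it resolves those credentials through the AWS SDK v2 (software.amazon.awssdk), which is why the auth and sts modules replace the v1 aws-java-sdk-core artifact below. A minimal illustrative sketch of that v2 credential resolution, not part of this patch (AwsV2CredentialCheck is a hypothetical class name):

    import software.amazon.awssdk.auth.credentials.AwsCredentials;
    import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider;
    import software.amazon.awssdk.regions.Region;
    import software.amazon.awssdk.services.sts.StsClient;
    import software.amazon.awssdk.services.sts.model.GetCallerIdentityResponse;

    public class AwsV2CredentialCheck {
        public static void main(String[] args) {
            // Resolves credentials the same way the Vault aws_iam flow does:
            // env vars, system properties, profile files, then instance metadata.
            AwsCredentials credentials = DefaultCredentialsProvider.create().resolveCredentials();
            System.out.println("Access key in use: " + credentials.accessKeyId());

            // The sts module supplies the identity Vault validates when it
            // receives the signed GetCallerIdentity request.
            try (StsClient sts = StsClient.builder().region(Region.US_EAST_2).build()) {
                GetCallerIdentityResponse identity = sts.getCallerIdentity();
                System.out.println("Caller ARN: " + identity.arn());
            }
        }
    }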
--- pom.xml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 72463b39..ba9d77ed 100644 --- a/pom.xml +++ b/pom.xml @@ -172,9 +172,14 @@ aws-java-sdk-s3 - com.amazonaws - aws-java-sdk-core - 1.12.736 + software.amazon.awssdk + auth + 2.25.67 + + + software.amazon.awssdk + sts + 2.25.67 jakarta.servlet From ee20f1198d6b948a73fbd1185535aa0531ee87cb Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sun, 9 Jun 2024 14:19:00 -0400 Subject: [PATCH 116/150] hopefully this works, vault login via cli works but not via spring yet --- src/main/resources/bootstrap-aws.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-aws.yml index 1232df78..a9c329aa 100644 --- a/src/main/resources/bootstrap-aws.yml +++ b/src/main/resources/bootstrap-aws.yml @@ -4,12 +4,13 @@ spring: cloud: vault: uri: ${VAULT_URI} - authentication: aws_iam - aws-iam: - role: aws-role - region: us-east-2 kv: enabled: true profile-separator: '/' default-context: TransferService - backend: secrets \ No newline at end of file + backend: secrets + authentication: aws_iam + aws-iam: + role: aws-role + region: us-east-2 + server-name: vault From b7c4b6b132e1373da9200605a30eed9ceff0cda6 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 10 Jun 2024 16:19:55 -0400 Subject: [PATCH 117/150] approle authentication is fully working --- src/main/resources/bootstrap-aws.yml | 16 ---------------- src/main/resources/bootstrap-prod.yml | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 16 deletions(-) create mode 100644 src/main/resources/bootstrap-prod.yml diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-aws.yml index a9c329aa..e69de29b 100644 --- a/src/main/resources/bootstrap-aws.yml +++ b/src/main/resources/bootstrap-aws.yml @@ -1,16 +0,0 @@ -spring: - application: - name: TransferService - cloud: - vault: - uri: ${VAULT_URI} - kv: - enabled: true - profile-separator: '/' - default-context: TransferService - backend: secrets - authentication: aws_iam - aws-iam: - role: aws-role - region: us-east-2 - server-name: vault diff --git a/src/main/resources/bootstrap-prod.yml b/src/main/resources/bootstrap-prod.yml new file mode 100644 index 00000000..58e96d73 --- /dev/null +++ b/src/main/resources/bootstrap-prod.yml @@ -0,0 +1,15 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: approle + app-role: + role-id: ${VAULT_ROLE_ID} + secret-id: ${VAULT_SECRET_ID} \ No newline at end of file From cb258fc242ead6778ee509a979cc462308ed6df7 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 10 Jun 2024 16:20:09 -0400 Subject: [PATCH 118/150] testing aws auth now that I got that working --- src/main/resources/bootstrap-aws.yml | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-aws.yml index e69de29b..83567823 100644 --- a/src/main/resources/bootstrap-aws.yml +++ b/src/main/resources/bootstrap-aws.yml @@ -0,0 +1,14 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: aws_iam + aws-iam: + role: aws-role \ No newline at end of file From 
178d2f2e57456741de8a485120ad5e5c54900792 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 17 Jun 2024 10:48:34 -0400 Subject: [PATCH 119/150] slight updates --- Dockerfile | 30 ++----------------- .../config/BatchConfig.java | 2 ++ 2 files changed, 4 insertions(+), 28 deletions(-) diff --git a/Dockerfile b/Dockerfile index 73e98aa8..fcc57b88 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM maven:3.9.6-amazoncorretto-21 AS build +FROM maven:3.9.7-amazoncorretto-21 AS build COPY src /home/app/src COPY pom.xml /home/app @@ -15,39 +15,13 @@ RUN pip install pmeter-ods COPY --from=build /home/app/target/ods-transfer-service-0.0.1-SNAPSHOT.jar /usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar ENV PIP_ROOT_USER_ACTION=ignore -ENV NODE_NAME="${NODE_NAME}" -ENV USER_NAME="${USER_NAME}" -ENV APP_NAME="${USER_NAME}"-"${NODE_NAME}" - -ENV CONNECTOR_QUEUE="${APP_NAME}" -ENV ODS_GDRIVE_CLIENT_ID="${ODS_GDRIVE_CLIENT_ID}" -ENV ODS_GDRIVE_CLIENT_SECRET="${ODS_GDRIVE_CLIENT_SECRET}" -ENV ODS_GDRIVE_PROJECT_ID="onedatashare-dev" -ENV EUREKA_URI="${EUREKA_URI}" -ENV EUREKA_PASS="${EUREKA_PASS}" -ENV EUREKA_USER="${EUREKA_USER}" -ENV COCKROACH_URI="${COCKROACH_URI}" -ENV COCKROACH_USER="${COCKROACH_USER}" -ENV COCKROACH_PASS="${COCKROACH_PASS}" -ENV RMQ_ADDRESS="amqps://b-0e720b16-3ea7-4227-ad65-6cce3704121c.mq.us-east-2.amazonaws.com:5671" - -#use ODS user for your private queue. -#create creds through aws console -ENV AMPQ_USER="${AMPQ_USER}" -ENV AMPQ_PWD="${AMPQ_PWD}" #change to monitor the active NIC ENV PMETER_CLI_OPTIONS="-NS" ENV PMETER_NIC_INTERFACE="${PMETER_NIC_INTERFACE:-eth0}" -ENV INFLUX_ORG="${INFLUX_ORG}" -ENV INFLUX_BUCKET="${USER_NAME}" -ENV INFLUX_TOKEN="${INFLUX_TOKEN}" -ENV INFLUX_URI="https://influxdb.onedatashare.org" ENV ENABLE_PMETER="true" ENV PMETER_CRON_EXP="*/15 * * * * *" - -ENV OPTIMIZER_URL="${OPTIMIZER_URL}" -ENV OPTIMIZER_ENABLE="${OPTIMIZER_ENABLE}" +ENV SPRING_PROFILES_ACTIVE=aws,virtual,cockroach ENV PATH "/home/ods/.local/bin:${PATH}" diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 570ff12d..0171e420 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -1,5 +1,6 @@ package org.onedatashare.transferservice.odstransferservice.config; +import com.amazonaws.regions.AwsRegionProvider; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; @@ -51,5 +52,6 @@ public BackOffPolicy backOffPolicy() { backOffPolicy.setMaxInterval(TimeUnit.DAYS.toMillis(1)); return backOffPolicy; } + } From 97a3e34db4b233b95084c80829c4bd903f71575a Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 21 Jun 2024 11:21:26 -0400 Subject: [PATCH 120/150] Ok fixed the dev situation --- src/main/resources/bootstrap-local.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/resources/bootstrap-local.yml b/src/main/resources/bootstrap-local.yml index 8b4a94d7..113ab58d 100644 --- a/src/main/resources/bootstrap-local.yml +++ b/src/main/resources/bootstrap-local.yml @@ -9,5 +9,5 @@ spring: kv: enabled: true profile-separator: '/' - default-context: TransferService + default-context: dev/TransferService backend: secrets \ No newline at end of file 
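
For reference, the approle authentication that bootstrap-prod.yml wires up in patch 117 boils down to a single HTTP exchange: Vault trades the role_id/secret_id pair for a client token, which Spring Cloud Vault then uses to read the secrets/prod/TransferService context. A standalone sketch of that login call, for orientation only and not part of any patch in this series (VaultAppRoleLoginSketch is a hypothetical class; it assumes the same VAULT_URI, VAULT_ROLE_ID, and VAULT_SECRET_ID environment variables the profile consumes):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class VaultAppRoleLoginSketch {
        public static void main(String[] args) throws Exception {
            String vaultUri = System.getenv("VAULT_URI");
            String roleId = System.getenv("VAULT_ROLE_ID");
            String secretId = System.getenv("VAULT_SECRET_ID");

            // AppRole login: exchanges role_id + secret_id for a client token.
            String body = String.format("{\"role_id\":\"%s\",\"secret_id\":\"%s\"}", roleId, secretId);
            HttpRequest request = HttpRequest.newBuilder(URI.create(vaultUri + "/v1/auth/approle/login"))
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(body))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            // The response JSON carries auth.client_token, which the Spring
            // profile then presents when reading configuration from the KV backend.
            System.out.println(response.body());
        }
    }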
From 1d42b952d6e1bd2e4720d8d020bf8baf0e70eda1 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Fri, 21 Jun 2024 13:52:19 -0400 Subject: [PATCH 121/150] instead of IAM role chaining just gonna use two separate profiles, one for ec2 and the other for eks --- .../{bootstrap-aws.yml => bootstrap-ec2.yml} | 2 +- src/main/resources/bootstrap-eks.yml | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) rename src/main/resources/{bootstrap-aws.yml => bootstrap-ec2.yml} (92%) create mode 100644 src/main/resources/bootstrap-eks.yml diff --git a/src/main/resources/bootstrap-aws.yml b/src/main/resources/bootstrap-ec2.yml similarity index 92% rename from src/main/resources/bootstrap-aws.yml rename to src/main/resources/bootstrap-ec2.yml index 83567823..eff39d96 100644 --- a/src/main/resources/bootstrap-aws.yml +++ b/src/main/resources/bootstrap-ec2.yml @@ -11,4 +11,4 @@ spring: backend: secrets authentication: aws_iam aws-iam: - role: aws-role \ No newline at end of file + role: ec2-role \ No newline at end of file diff --git a/src/main/resources/bootstrap-eks.yml b/src/main/resources/bootstrap-eks.yml new file mode 100644 index 00000000..11b8041e --- /dev/null +++ b/src/main/resources/bootstrap-eks.yml @@ -0,0 +1,14 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: aws_iam + aws-iam: + role: eks-role \ No newline at end of file From 4e99dc3b06db6fd6c78238d8104e674dcb223103 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 12 Aug 2024 18:15:40 -0400 Subject: [PATCH 122/150] fixed the connection pooling issue --- .../transferservice/odstransferservice/OdsTransferService.java | 1 - src/main/resources/application-cockroach.properties | 2 +- src/main/resources/application.properties | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java index 0a890310..c6b8bcfb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java @@ -1,6 +1,5 @@ package org.onedatashare.transferservice.odstransferservice; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.client.discovery.EnableDiscoveryClient; diff --git a/src/main/resources/application-cockroach.properties b/src/main/resources/application-cockroach.properties index d791511b..49a94dd3 100644 --- a/src/main/resources/application-cockroach.properties +++ b/src/main/resources/application-cockroach.properties @@ -4,7 +4,7 @@ spring.datasource.driver-class-name=org.postgresql.Driver spring.datasource.url=jdbc:${COCKROACH_URI:postgresql://localhost:26257/job_details?sslmode=disable} spring.datasource.username=${COCKROACH_USER:root} spring.datasource.password=${COCKROACH_PASS:root} -spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:10000} +spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:1024} spring.jpa.show-sql=true spring.jpa.properties.hibernate.format_sql=true diff --git a/src/main/resources/application.properties 
b/src/main/resources/application.properties index 4859e83a..451f8832 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -19,7 +19,6 @@ management.endpoints.web.exposure.include=* #Ignore non-null values spring.jackson.default-property-inclusion=NON_NULL -spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:10000} spring.batch.job.enabled=false #RabitMQ From 1f17e36404258658b371ae195d4182d21f5c6fcd Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 12 Aug 2024 18:31:34 -0400 Subject: [PATCH 123/150] controlling sizing --- src/main/resources/application-cockroach.properties | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/resources/application-cockroach.properties b/src/main/resources/application-cockroach.properties index 49a94dd3..8cd814ae 100644 --- a/src/main/resources/application-cockroach.properties +++ b/src/main/resources/application-cockroach.properties @@ -4,7 +4,12 @@ spring.datasource.driver-class-name=org.postgresql.Driver spring.datasource.url=jdbc:${COCKROACH_URI:postgresql://localhost:26257/job_details?sslmode=disable} spring.datasource.username=${COCKROACH_USER:root} spring.datasource.password=${COCKROACH_PASS:root} +#Hikari sizing controls +spring.datasource.hikari.minimum-idle=5 spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:1024} +spring.datasource.hikari.idle-timeout=100000 +spring.datasource.hikari.max-lifetime=1800000 +spring.datasource.hikari.connection-timeout=30000 spring.jpa.show-sql=true spring.jpa.properties.hibernate.format_sql=true From 2870120e81445056c137b57fdf420c1523271d57 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Thu, 12 Sep 2024 01:35:36 -0400 Subject: [PATCH 124/150] Added in hazelcast registration and de-registration for now. 
I dislike that we use the nodeName as the unique parameter, but the only other option is to create some kind of config file that persists this to the file system, and I am not a huge fan of that either.
---
 pom.xml                                                  |   6 ++
 .../config/HazelcastClientConfig.java                    |  61 +++++++++++
 .../controller/TransferController.java                   |  13 +--
 .../message/TransferJobRequestHandler.java               |   9 +-
 .../model/FileTransferNodeMetaData.java                  |  23 ++++
 .../DatabaseService/InfluxIOService.java                 |  10 +-
 ...sferNodeRegistrationLifeCycleListener.java            |  73 +++++++++++++
 .../FileTransferNodeRegistrationService.java             |  63 +++++++++++
 .../service/JobControl.java                              |  21 +++-
 .../listner/JobCompletionListener.java                   |  47 +++-----
 src/main/resources/application.properties                |   1 +
 src/main/resources/bootstrap-local.yml                   |   2 +-
 ...leTransferNodeRegistrationServiceTest.java            | 101 ++++++++++++++++++
 13 files changed, 373 insertions(+), 57 deletions(-)
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java
 create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java
 create mode 100644 src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java

diff --git a/pom.xml b/pom.xml
index ba9d77ed..3863b584 100644
--- a/pom.xml
+++ b/pom.xml
@@ -42,6 +42,12 @@
             <groupId>com.fasterxml.jackson.datatype</groupId>
             <artifactId>jackson-datatype-jsr310</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.hazelcast</groupId>
+            <artifactId>hazelcast</artifactId>
+            <version>5.5.0</version>
+        </dependency>
+
         <dependency>
             <groupId>org.springframework.cloud</groupId>
             <artifactId>spring-cloud-starter-vault-config</artifactId>

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java
new file mode 100644
index 00000000..76371bc4
--- /dev/null
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java
@@ -0,0 +1,61 @@
+package org.onedatashare.transferservice.odstransferservice.config;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.hazelcast.client.HazelcastClient;
+import com.hazelcast.client.config.ClientConfig;
+import com.hazelcast.core.HazelcastInstance;
+import com.hazelcast.core.HazelcastJsonValue;
+import com.hazelcast.map.IMap;
+import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationLifeCycleListener;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Profile;
+import org.springframework.core.env.Environment;
+
+import java.util.UUID;
+
+@Configuration
+public class HazelcastClientConfig {
+
+    private final Environment environment;
+    private final ObjectMapper objectMapper;
+
+    public HazelcastClientConfig(Environment environment, ObjectMapper objectMapper) {
+        this.environment = environment;
+        this.objectMapper = objectMapper;
+    }
+
+    @Bean
+    @Qualifier("clientConfig")
+    @Profile("local")
+    public ClientConfig devClientConfig() {
+        ClientConfig clientConfig = new ClientConfig();
+        clientConfig.setClusterName("dev-scheduler-cluster");
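+        // Dev-only assumption: a Hazelcast member for the scheduler cluster is reachable on localhost.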
clientConfig.getNetworkConfig().addAddress("127.0.0.1"); + return clientConfig; + } + + @Bean + @Qualifier("clientConfig") + @Profile({"prod", "eks", "ec2",}) + public ClientConfig prodClientConfig() { + ClientConfig clientConfig = new ClientConfig(); + clientConfig.setClusterName("prod-scheduler-cluster"); + clientConfig.getNetworkConfig().getEurekaConfig().setEnabled(true); + return clientConfig; + } + + @Bean + public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { + HazelcastInstance hazelcastInstance = HazelcastClient.newHazelcastClient(clientConfig); + FileTransferNodeRegistrationLifeCycleListener fileTransferNodeRegistrationEventListener = new FileTransferNodeRegistrationLifeCycleListener(hazelcastInstance, environment, objectMapper); + hazelcastInstance.getLifecycleService().addLifecycleListener(fileTransferNodeRegistrationEventListener); + return hazelcastInstance; + } + + @Bean + public IMap fileTransferNodeRegistrationMap(HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("file-transfer-node-map"); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java index 12b4c66f..bc56cb0b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java @@ -28,14 +28,14 @@ public class TransferController { Logger logger = LoggerFactory.getLogger(TransferController.class); - JobControl jc; + JobControl jobControl; JobLauncher jobLauncher; JobParamService jobParamService; public TransferController(JobControl jobControl, JobLauncher jobLauncher, JobParamService jobParamService) { - this.jc = jobControl; + this.jobControl = jobControl; this.jobLauncher = jobLauncher; this.jobParamService = jobParamService; @@ -43,12 +43,9 @@ public TransferController(JobControl jobControl, JobLauncher jobLauncher, JobPar @RequestMapping(value = "/start", method = RequestMethod.POST) @Async - public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { - logger.info("Controller Entry point"); - JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); - Job job = jc.concurrentJobDefinition(request); - JobExecution jobExecution = jobLauncher.run(job, parameters); - return ResponseEntity.status(HttpStatus.OK).body("Your batch job has been submitted with \n ID: " + jobExecution.getJobId()); + public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { + JobExecution jobExecution = this.jobControl.runJob(request); + return ResponseEntity.ok(jobExecution.getJobId()); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index 039c0ce9..ed0db7da 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -22,18 +22,14 @@ public class TransferJobRequestHandler implements MessageHandler { private final ObjectMapper objectMapper; - private final JobParamService jobParamService; - private final JobLauncher jobLauncher; private final 
JobControl jobControl; private final ExpanderFactory expanderFactory; Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); - public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher jobLauncher, JobControl jobControl, ExpanderFactory expanderFactory) { + public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobControl jobControl, ExpanderFactory expanderFactory) { this.objectMapper = messageObjectMapper; - this.jobParamService = jobParamService; - this.jobLauncher = jobLauncher; this.jobControl = jobControl; this.expanderFactory = expanderFactory; } @@ -45,9 +41,8 @@ public void messageHandler(Message message) throws IOException { logger.info("Job Received: {}", request.toString()); List fileInfo = expanderFactory.getExpander(request.getSource()); request.getSource().setInfoList(fileInfo); - JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); try { - jobLauncher.run(jobControl.concurrentJobDefinition(request), parameters); + this.jobControl.runJob(request); } catch (Exception e) { logger.error(e.getMessage()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java new file mode 100644 index 00000000..013573c2 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java @@ -0,0 +1,23 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +import java.io.Serializable; +import java.util.UUID; + +@Data +@AllArgsConstructor +@Builder +public class FileTransferNodeMetaData implements Serializable { + + //ods metrics + String odsOwner; + String nodeName; + UUID nodeUuid; + Boolean runningJob; + Boolean online; + long jobId; + UUID jobUuid; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java index 62760fd7..3f0e8bea 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java @@ -19,8 +19,8 @@ public class InfluxIOService { private final InfluxDBClient influxClient; Logger logger = LoggerFactory.getLogger(InfluxIOService.class); - @Value("${ods.influx.bucket}") - private String bucketName; + @Value("${ods.user}") + private String odsUserName; @Value("${ods.influx.org}") String org; @@ -35,13 +35,13 @@ public InfluxIOService(InfluxDBClient influxClient) { @PostConstruct public void postConstruct() { - this.reconfigureBucketForNewJob(this.bucketName); + this.reconfigureBucketForNewJob(odsUserName); } public void reconfigureBucketForNewJob(String ownerId) { - logger.info("********* Reconfiguring the Bucket ***********"); + logger.info("********* Reconfiguring the Bucket to Owner {}***********", ownerId); if (ownerId == null) { - bucket = influxClient.getBucketsApi().findBucketByName(this.bucketName); + bucket = influxClient.getBucketsApi().findBucketByName(this.odsUserName); } else { bucket = influxClient.getBucketsApi().findBucketByName(ownerId); } diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java
new file mode 100644
index 00000000..5b2aaf2c
--- /dev/null
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java
@@ -0,0 +1,73 @@
+package org.onedatashare.transferservice.odstransferservice.service;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.hazelcast.core.HazelcastInstance;
+import com.hazelcast.core.HazelcastJsonValue;
+import com.hazelcast.core.LifecycleEvent;
+import com.hazelcast.core.LifecycleListener;
+import com.hazelcast.map.IMap;
+import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.core.env.Environment;
+
+import java.util.UUID;
+
+public class FileTransferNodeRegistrationLifeCycleListener implements LifecycleListener {
+
+    private final HazelcastInstance hazelcastInstance;
+    private final ObjectMapper objectMapper;
+    private final IMap<String, HazelcastJsonValue> fileTransferNodeMap;
+    private final String appName;
+    private final String odsOwner;
+    private final UUID nodeUuid;
+    Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationLifeCycleListener.class);
+
+    public FileTransferNodeRegistrationLifeCycleListener(HazelcastInstance hazelcastInstance, Environment environment, ObjectMapper objectMapper) {
+        this.hazelcastInstance = hazelcastInstance;
+        this.appName = environment.getProperty("spring.application.name");
+        this.odsOwner = environment.getProperty("ods.user");
+        this.objectMapper = objectMapper;
+        this.nodeUuid = hazelcastInstance.getLocalEndpoint().getUuid();
+        this.fileTransferNodeMap = hazelcastInstance.getMap("file-transfer-node-map");
+    }
+
+    @Override
+    public void stateChanged(LifecycleEvent event) {
+        if (event.getState() == LifecycleEvent.LifecycleState.CLIENT_CONNECTED) {
+            FileTransferNodeMetaData fileTransferNodeMetaData = FileTransferNodeMetaData.builder()
+                    .online(true)
+                    .nodeName(this.appName)
+                    .odsOwner(this.odsOwner)
+                    .nodeUuid(this.nodeUuid)
+                    .runningJob(false)
+                    .jobUuid(new UUID(0, 0))
+                    .jobId(-1)
+                    .build();
+            try {
+                String json = this.objectMapper.writeValueAsString(fileTransferNodeMetaData);
+                logger.info("Registering client: {}", fileTransferNodeMetaData);
+                this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(json));
+            } catch (JsonProcessingException e) {
+                e.printStackTrace();
+            }
+        }
+        if (event.getState() == LifecycleEvent.LifecycleState.SHUTTING_DOWN) {
+            try {
+                // Entries are keyed by the application name, so the shutdown path must look the node up by appName as well.
+                String jsonValue = this.fileTransferNodeMap.get(this.appName).getValue();
+                FileTransferNodeMetaData fileTransferNodeMetaData = this.objectMapper.readValue(jsonValue, FileTransferNodeMetaData.class);
+                fileTransferNodeMetaData.setRunningJob(false);
+                fileTransferNodeMetaData.setOnline(false);
+                logger.info("De-Registering client: {}", fileTransferNodeMetaData);
+                jsonValue = this.objectMapper.writeValueAsString(fileTransferNodeMetaData);
+                this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(jsonValue));
+            } catch (JsonProcessingException e) {
+                e.printStackTrace();
+            }
+        }
+    }
+}

diff --git
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java new file mode 100644 index 00000000..e1a80122 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java @@ -0,0 +1,63 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import jakarta.annotation.PostConstruct; +import lombok.SneakyThrows; +import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; +import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Service; + +import java.util.UUID; + +@Service +public class FileTransferNodeRegistrationService { + + private final IMap fileTransferNodeRegistrationMap; + private final UUID nodeUuid; + private final String appName; + private final String odsOwner; + private final ObjectMapper objectMapper; + private final Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationService.class); + + public FileTransferNodeRegistrationService(HazelcastInstance hazelcastInstance, IMap fileTransferNodeRegistrationMap, Environment environment, ObjectMapper objectMapper) { + this.fileTransferNodeRegistrationMap = fileTransferNodeRegistrationMap; + this.nodeUuid = hazelcastInstance.getLocalEndpoint().getUuid(); + this.appName = environment.getProperty("spring.application.name"); + this.odsOwner = environment.getProperty("ods.user"); + this.objectMapper = objectMapper; + } + + @SneakyThrows + @PostConstruct + public void init(){ + this.updateRegistrationInHazelcast(null); + } + + public void updateRegistrationInHazelcast(JobExecution jobExecution) throws JsonProcessingException { + var metaDataBuilder = FileTransferNodeMetaData.builder(); + if (jobExecution == null) { + metaDataBuilder.jobId(-1L); + metaDataBuilder.runningJob(false); + metaDataBuilder.jobUuid(new UUID(0, 0)); + } else { + metaDataBuilder.jobId(jobExecution.getJobId()); + metaDataBuilder.runningJob(true); + metaDataBuilder.jobUuid(UUID.fromString(jobExecution.getJobParameters().getString(ODSConstants.JOB_UUID))); + } + metaDataBuilder.online(true); + metaDataBuilder.nodeName(this.appName); + metaDataBuilder.odsOwner(this.odsOwner); + metaDataBuilder.nodeUuid(this.nodeUuid); + String jsonValue = this.objectMapper.writeValueAsString(metaDataBuilder.build()); + logger.info("Registering node: {}", jsonValue); + this.fileTransferNodeRegistrationMap.put(this.appName, new HazelcastJsonValue(jsonValue)); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index c2c1433e..ab4de771 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -11,11 +11,15 @@ import 
org.onedatashare.transferservice.odstransferservice.service.step.ReaderWriterFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.Job; +import org.springframework.batch.core.*; import org.springframework.batch.core.job.builder.FlowBuilder; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.flow.Flow; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.JobRestartException; import org.springframework.batch.core.step.builder.SimpleStepBuilder; import org.springframework.batch.core.step.builder.StepBuilder; import org.springframework.beans.factory.annotation.Autowired; @@ -59,6 +63,14 @@ public class JobControl { @Autowired BackOffPolicy backOffPolicy; + @Autowired + JobLauncher jobLauncher; + + @Autowired + JobParamService jobParamService; + + JobExecution latestJobExecution; + private List createConcurrentFlow(TransferJobRequest request) { String basePath = request.getSource().getFileSourcePath(); return request.getSource().getInfoList().stream().map(file -> { @@ -100,4 +112,11 @@ public Job concurrentJobDefinition(TransferJobRequest request) { .build(); } + public JobExecution runJob(TransferJobRequest transferJobRequest) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException { + Job job = this.concurrentJobDefinition(transferJobRequest); + JobParameters jobParameters = this.jobParamService.translate(new JobParametersBuilder(), transferJobRequest); + this.latestJobExecution = this.jobLauncher.run(job, jobParameters); + return this.latestJobExecution; + } + } \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java index 76f22ce6..bc658d01 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java @@ -1,10 +1,9 @@ package org.onedatashare.transferservice.odstransferservice.service.listner; -import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerCreateRequest; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; +import com.fasterxml.jackson.core.JsonProcessingException; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; +import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationService; import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; import org.slf4j.Logger; @@ -12,17 +11,12 @@ import org.springframework.batch.core.JobExecution; import org.springframework.batch.core.JobExecutionListener; import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.core.env.Environment; import org.springframework.scheduling.annotation.Async; import org.springframework.stereotype.Service; import java.time.Duration; import java.util.Set; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.JOB_UUID; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.OWNER_ID; - @Service public class JobCompletionListener implements JobExecutionListener { @@ -36,22 +30,10 @@ public class JobCompletionListener implements JobExecutionListener { OptimizerService optimizerService; - - @Value("${spring.application.name}") - private String appName; - - @Value("${transfer.service.parallelism}") - int maxParallel; - - @Value("${transfer.service.concurrency}") - int maxConc; - - @Value("${transfer.service.pipelining}") - int maxPipe; boolean optimizerEnable; @Autowired - Environment environment; + FileTransferNodeRegistrationService fileTransferNodeRegistrationService; public JobCompletionListener(OptimizerService optimizerService, MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { this.optimizerService = optimizerService; @@ -67,17 +49,11 @@ public JobCompletionListener(OptimizerService optimizerService, MetricsCollector @Async public void beforeJob(JobExecution jobExecution) { logger.info("*****Job Execution start Time***** : {} with jobId={}", jobExecution.getStartTime(), jobExecution.getJobId()); - long fileCount = jobExecution.getJobParameters().getLong(ODSConstants.FILE_COUNT); this.jobIds.add(jobExecution.getJobId()); - String optimizerType = jobExecution.getJobParameters().getString(ODSConstants.OPTIMIZER); - String jobUuid = jobExecution.getJobParameters().getString(JOB_UUID); - String userId = jobExecution.getJobParameters().getString(OWNER_ID); - if (optimizerType != null) { - if (!optimizerType.equals("None") && !optimizerType.isEmpty()) { - OptimizerCreateRequest createRequest = new OptimizerCreateRequest(userId, appName, maxConc, maxParallel, maxPipe, optimizerType, fileCount, jobExecution.getJobId(), this.environment.getActiveProfiles()[0], jobUuid); - optimizerService.createOptimizerBlocking(createRequest); - this.optimizerEnable = true; - } + try { + this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(jobExecution); + } catch (JsonProcessingException e) { + logger.error("Failed to update status of FTN inside of Hazelcast for job start. Exception \n {}", e.getMessage()); } } @@ -87,12 +63,13 @@ public void afterJob(JobExecution jobExecution) { logger.info("*****Job Execution End Time**** : {}", jobExecution.getEndTime()); logger.info("Total Job Time in seconds: {}", Duration.between(jobExecution.getStartTime(), jobExecution.getEndTime()).toSeconds()); connectionBag.closePools(); - if (this.optimizerEnable) { - this.optimizerService.deleteOptimizerBlocking(new OptimizerDeleteRequest(appName)); - this.optimizerEnable = false; - } this.threadPool.clearPools(); System.gc(); + try { + this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(null); + } catch (JsonProcessingException e) { + logger.error("Failed to update status of FTN inside of Hazelcast for job end. 
Exception \n {}", e.getMessage()); + } } } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 451f8832..41f89e6d 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,6 +1,7 @@ spring.application.name=${APP_NAME:TransferService} ods.user=${USER_NAME} server.port=8092 +server.shutdown=graceful spring.main.allow-bean-definition-overriding=true gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} diff --git a/src/main/resources/bootstrap-local.yml b/src/main/resources/bootstrap-local.yml index 113ab58d..3377c466 100644 --- a/src/main/resources/bootstrap-local.yml +++ b/src/main/resources/bootstrap-local.yml @@ -9,5 +9,5 @@ spring: kv: enabled: true profile-separator: '/' - default-context: dev/TransferService + default-context: prod/TransferService backend: secrets \ No newline at end of file diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java new file mode 100644 index 00000000..03a5def9 --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java @@ -0,0 +1,101 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import org.junit.Assert; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; +import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; +import org.springframework.core.env.Environment; + +import java.util.UUID; + +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class FileTransferNodeRegistrationServiceTest { + + @Mock + private Environment environment; + + @Mock + private JobExecution jobExecution; + + @Mock + private JobParameters jobParameters; + + private HazelcastInstance hazelcastInstance; + String appName = "testAppName@random.org"; + + private IMap fileTransferNodeMap; + + FileTransferNodeRegistrationService testObj; + ObjectMapper objectMapper = new ObjectMapper(); + + private UUID testJobUuid = UUID.randomUUID(); + + public FileTransferNodeRegistrationServiceTest() { + this.hazelcastInstance = Hazelcast.newHazelcastInstance(); + this.fileTransferNodeMap = this.hazelcastInstance.getMap("testNodeRegistrationMap"); + } + + @BeforeEach + public void setUp() { + when(environment.getProperty("spring.application.name")).thenReturn(this.appName); + when(environment.getProperty("ods.user")).thenReturn("testUser"); + testObj = new FileTransferNodeRegistrationService(hazelcastInstance, fileTransferNodeMap, environment, this.objectMapper); + } + + @Test + public void testInitialNodeRegistrationInMap() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(null); + 
Assert.assertEquals(this.fileTransferNodeMap.containsKey(this.appName), true); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = this.objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(this.hazelcastInstance.getLocalEndpoint().getUuid(), testData.getNodeUuid()); + Assert.assertEquals("testAppName", testData.getNodeName()); + Assert.assertEquals("testUser", testData.getOdsOwner()); + Assert.assertEquals(-1L, testData.getJobId()); + Assert.assertEquals(new UUID(0, 0), testData.getJobUuid()); + Assert.assertEquals(false, testData.getRunningJob()); + Assert.assertEquals(true, testData.getOnline()); + } + + @Test + public void testRegisterWithJobExecution() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(this.jobExecution); + when(jobExecution.getJobId()).thenReturn(1L); + when(jobExecution.getJobParameters()).thenReturn(this.jobParameters); + when(this.jobParameters.getString(ODSConstants.JOB_UUID)).thenReturn(this.testJobUuid.toString()); + testObj.updateRegistrationInHazelcast(this.jobExecution); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = this.objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(true, testData.getRunningJob()); + Assert.assertEquals(this.testJobUuid, testData.getJobUuid()); + } + + @Test + public void testDeRegisterNodeFromMap() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(null); + testObj.updateRegistrationInHazelcast(this.jobExecution); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(false, testData.getRunningJob()); + Assert.assertEquals(false, testData.getOnline()); + + } + +} From 859df4e85f157495ffceda4e6ad125eddf7f1e9c Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 14 Sep 2024 19:12:04 -0400 Subject: [PATCH 125/150] Removing rabbitmq in faver of hazelcast --- pom.xml | 4 - .../config/BatchConfig.java | 13 --- .../config/HazelcastClientConfig.java | 12 ++- .../config/OptimizerConfig.java | 31 ------- .../config/RabbitMQConfig.java | 38 --------- .../consumer/RabbitMQConsumer.java | 83 ------------------- .../TransferApplicationParamHandler.java | 3 +- .../TransferApplicationParams.java | 2 +- .../optimizer/OptimizerCreateRequest.java | 35 -------- .../optimizer/OptimizerDeleteRequest.java | 12 --- .../service/OptimizerService.java | 58 ------------- .../listner/JobCompletionListener.java | 6 +- src/main/resources/application.properties | 21 ----- 13 files changed, 13 insertions(+), 305 deletions(-) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java rename src/main/java/org/onedatashare/transferservice/odstransferservice/model/{optimizer => }/TransferApplicationParams.java (94%) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java delete mode 100644 
src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java diff --git a/pom.xml b/pom.xml index 3863b584..5392a70a 100644 --- a/pom.xml +++ b/pom.xml @@ -67,10 +67,6 @@ commons-pool2 2.11.1 - - org.springframework.boot - spring-boot-starter-amqp - com.box box-java-sdk diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 0171e420..3086d30e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -1,9 +1,5 @@ package org.onedatashare.transferservice.odstransferservice.config; -import com.amazonaws.regions.AwsRegionProvider; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; import org.springframework.batch.core.launch.JobLauncher; import org.springframework.batch.core.launch.support.TaskExecutorJobLauncher; import org.springframework.batch.core.repository.JobRepository; @@ -21,15 +17,6 @@ @Configuration public class BatchConfig { - - @Bean - public ObjectMapper messageObjectMapper() { - ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); - objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); - return objectMapper; - } - @Bean public PlatformTransactionManager transactionManager(DataSource dataSource) { return new DataSourceTransactionManager(dataSource); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java index 76371bc4..eff4e1d3 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -13,8 +13,6 @@ import org.springframework.context.annotation.Profile; import org.springframework.core.env.Environment; -import java.util.UUID; - @Configuration public class HazelcastClientConfig { @@ -58,4 +56,14 @@ public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { public IMap fileTransferNodeRegistrationMap(HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("file-transfer-node-map"); } + + @Bean + public IMap fileTransferSchedule(HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("file-transfer-schedule-map"); + } + + @Bean + public IMap carbonIntensityMap(HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("carbon-intensity-map"); + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java deleted file mode 100644 index b164d4f6..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import org.springframework.beans.factory.annotation.Value; 
-import org.springframework.boot.web.client.RestTemplateBuilder; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.web.client.RestTemplate; -import org.springframework.web.util.DefaultUriBuilderFactory; - -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -@Configuration -public class OptimizerConfig { - - @Value("${optimizer.url}") - private String optimizerUrl; - - @Bean - public RestTemplate optimizerTemplate() { - return new RestTemplateBuilder() - .uriTemplateHandler(new DefaultUriBuilderFactory(optimizerUrl)) - .build(); - } - - @Bean(name ="optimizerTaskExecutor") - public Executor optimizerTaskExecutor(){ - return Executors.newVirtualThreadPerTaskExecutor(); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java deleted file mode 100644 index 5fb680b1..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ /dev/null @@ -1,38 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import org.springframework.amqp.core.Binding; -import org.springframework.amqp.core.BindingBuilder; -import org.springframework.amqp.core.DirectExchange; -import org.springframework.amqp.core.Queue; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - - -@Configuration -public class RabbitMQConfig { - - @Value("${ods.rabbitmq.queue}") - String queueName; - - @Value("${ods.rabbitmq.exchange}") - String exchange; - - @Bean - Queue userQueue() { - return new Queue(this.queueName, true, false, false); - } - - @Bean - public DirectExchange exchange() { - return new DirectExchange(exchange); - } - - @Bean - public Binding binding(DirectExchange exchange, Queue userQueue) { - return BindingBuilder.bind(userQueue) - .to(exchange) - .with(queueName); - } - -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java deleted file mode 100644 index 13b7ad6d..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ /dev/null @@ -1,83 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.consumer; - - -import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; -import org.onedatashare.transferservice.odstransferservice.message.CarbonAvgRequestHandler; -import org.onedatashare.transferservice.odstransferservice.message.CarbonIpRequestHandler; -import org.onedatashare.transferservice.odstransferservice.message.TransferApplicationParamHandler; -import org.onedatashare.transferservice.odstransferservice.message.TransferJobRequestHandler; -import org.springframework.amqp.core.Message; -import org.springframework.amqp.core.MessagePostProcessor; -import org.springframework.amqp.core.Queue; -import org.springframework.amqp.rabbit.annotation.RabbitListener; -import org.springframework.amqp.rabbit.core.RabbitTemplate; -import org.springframework.stereotype.Service; - -import java.io.IOException; - -import static 
org.springframework.amqp.core.MessageProperties.CONTENT_TYPE_JSON; - -@Service -public class RabbitMQConsumer { - - private final TransferJobRequestHandler transferJobRequestHandler; - private final CarbonAvgRequestHandler carbonAvgRequestHandler; - private final TransferApplicationParamHandler transferApplicationParamHandler; - - private final CarbonIpRequestHandler carbonIpRequestHandler; - - Queue userQueue; - - RabbitTemplate rabbitTemplate; - - - public RabbitMQConsumer(RabbitTemplate rabbitTemplate, Queue userQueue, TransferJobRequestHandler transferJobRequestHandler, CarbonAvgRequestHandler carbonAvgRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler, CarbonIpRequestHandler carbonIpRequestHandler) { - this.userQueue = userQueue; - this.transferJobRequestHandler = transferJobRequestHandler; - this.carbonAvgRequestHandler = carbonAvgRequestHandler; - this.transferApplicationParamHandler = transferApplicationParamHandler; - this.carbonIpRequestHandler = carbonIpRequestHandler; - this.rabbitTemplate = rabbitTemplate; - } - - @RabbitListener(queues = "#{userQueue}") - public void consumeDefaultMessage(Message message) { - MessageType messageType = MessageType.valueOf(message.getMessageProperties().getHeader("type")); - try { - switch (messageType) { - case TRANSFER_JOB_REQUEST: { - this.transferJobRequestHandler.messageHandler(message); - break; - } - - case APPLICATION_PARAM_CHANGE: { - this.transferApplicationParamHandler.messageHandler(message); - break; - } - - case CARBON_AVG_REQUEST: { - this.carbonAvgRequestHandler.messageHandler(message); - break; - } - - case CARBON_IP_REQUEST: { - this.carbonIpRequestHandler.messageHandler(message); - break; - } - } - } catch (IOException e) { - e.printStackTrace(); - } - -// channel.basicAck(tag, false); - } - - public static MessagePostProcessor embedMessageType(String correlationId) { - return message -> { - message.getMessageProperties().setCorrelationId(correlationId); - message.getMessageProperties().setType(CONTENT_TYPE_JSON); - return message; - }; - } - -} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java index 60a918cd..ccb64b62 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -1,8 +1,7 @@ package org.onedatashare.transferservice.odstransferservice.message; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; +import org.onedatashare.transferservice.odstransferservice.model.TransferApplicationParams; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java similarity index 94% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java rename to 
src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java index bec5c9b3..2d88ccac 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; +package org.onedatashare.transferservice.odstransferservice.model; import lombok.Data; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java deleted file mode 100644 index 37488f10..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; - -import lombok.AllArgsConstructor; -import lombok.Data; - -import java.util.UUID; - -@Data -public class OptimizerCreateRequest { - String nodeId; - int maxConcurrency; - int maxParallelism; - int maxPipelining; - int maxChunkSize; - String optimizerType; - long fileCount; - Long jobId; - String dbType; - String jobUuid; - String userId; - - public OptimizerCreateRequest(String userId,String nodeId, int maxConcurrency, int maxParallelism, int maxPipelining, String optimizerType, long fileCount, long jobId, String dbType, String jobUuid) { - this.userId = userId; - this.maxConcurrency = maxConcurrency; - this.maxChunkSize = Integer.MAX_VALUE; - this.maxParallelism = maxParallelism; - this.maxPipelining = maxPipelining; - this.nodeId = nodeId; - this.optimizerType = optimizerType; - this.fileCount = fileCount; - this.jobId = jobId; - this.dbType = dbType; - this.jobUuid = jobUuid; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java deleted file mode 100644 index 5f47d53e..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; - -import lombok.Data; - -@Data -public class OptimizerDeleteRequest { - private String nodeId; - - public OptimizerDeleteRequest(String nodeId) { - this.nodeId = nodeId; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java deleted file mode 100644 index a5fa5457..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service; - -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerCreateRequest; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.HttpEntity; -import 
org.springframework.http.HttpHeaders; -import org.springframework.http.MediaType; -import org.springframework.scheduling.annotation.Async; -import org.springframework.stereotype.Service; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import java.util.concurrent.CompletableFuture; - -@Service -public class OptimizerService { - - @Autowired - RestTemplate optimizerTemplate; - - @Value("${spring.application.name}") - String appName; - - HttpHeaders headers; - - Logger logger = LoggerFactory.getLogger(OptimizerService.class); - - public OptimizerService() { - headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_JSON); - } - - @Async("optimizerTaskExecutor") - public void createOptimizerBlocking(OptimizerCreateRequest optimizerCreateRequest) throws RestClientException { - optimizerCreateRequest.setNodeId(this.appName); - logger.info("Sending OptimizerCreateRequest {}", optimizerCreateRequest); - HttpEntity createRequestHttpEntity = new HttpEntity<>(optimizerCreateRequest, this.headers); - logger.info(createRequestHttpEntity.getBody().toString()); - this.optimizerTemplate.postForLocation("/optimizer/create", createRequestHttpEntity, Void.class); - CompletableFuture.completedFuture(null); - } - - @Async("optimizerTaskExecutor") - public void deleteOptimizerBlocking(OptimizerDeleteRequest optimizerDeleteRequest) { - optimizerDeleteRequest.setNodeId(this.appName); - try { - this.optimizerTemplate.postForObject("/optimizer/delete", new HttpEntity<>(optimizerDeleteRequest, this.headers), Void.class); - } catch (RestClientException e) { - logger.error("Failed to Delete optimizer. {}", optimizerDeleteRequest); - } - logger.info("Deleted {}", optimizerDeleteRequest); - CompletableFuture.completedFuture(null); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java index bc658d01..2fd01f38 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java @@ -5,7 +5,6 @@ import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationService; import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; -import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; @@ -28,15 +27,12 @@ public class JobCompletionListener implements JobExecutionListener { MetricsCollector metricsCollector; - OptimizerService optimizerService; - boolean optimizerEnable; @Autowired FileTransferNodeRegistrationService fileTransferNodeRegistrationService; - public JobCompletionListener(OptimizerService optimizerService, MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { - this.optimizerService = optimizerService; + public JobCompletionListener(MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { this.metricsCollector = metricsCollector; this.connectionBag = connectionBag; this.optimizerEnable = false; diff --git 
a/src/main/resources/application.properties b/src/main/resources/application.properties index 41f89e6d..fb62b1a1 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -7,14 +7,6 @@ spring.main.allow-bean-definition-overriding=true gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} -#Eureka config -eureka.client.enabled=true -eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka -eureka.client.healthcheck.enabled=true -#eureka.client.registry-fetch-interval-seconds=5 -#eureka.instance.leaseRenewalIntervalInSeconds=10 -#eureka.instance.metadata-map.startup=${random.int} - #SBA management.endpoints.web.exposure.include=* @@ -22,19 +14,6 @@ management.endpoints.web.exposure.include=* spring.jackson.default-property-inclusion=NON_NULL spring.batch.job.enabled=false -#RabitMQ -ods.rabbitmq.exchange=ods.exchange -#for vfs nodes this should be the APP_NAME which is always lowercase. -ods.rabbitmq.queue=${CONNECTOR_QUEUE:transferQueue} -spring.rabbitmq.addresses=${AMPQ_ADDRESS} -spring.rabbitmq.port=${AMPQ_PORT:5672} -spring.rabbitmq.username=${AMPQ_USER:guest} -spring.rabbitmq.password=${AMPQ_PWD:guest} -spring.rabbitmq.connection-timeout=20000 - -#optimizer -optimizer.url=${OPTIMIZER_URL:http://localhost:8088} - #pmeter pmeter.report.path=${PMETER_REPORT_PATH:${HOME}/.pmeter/transfer_service_pmeter_measure.txt} pmeter.cron.run=${ENABLE_PMETER:false} From 4c2f3fa39adac4c54876f78c06b71b84fc3e0c25 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Sat, 14 Sep 2024 21:54:37 -0400 Subject: [PATCH 126/150] initial impl of auction driven design for transfer nodes recieving jobs --- .../config/HazelcastClientConfig.java | 18 ++++- .../controller/TransferController.java | 19 +---- .../message/CarbonAvgRequestHandler.java | 64 ---------------- .../message/CarbonIpRequestHandler.java | 56 -------------- .../message/MessageHandler.java | 5 +- .../TransferApplicationParamHandler.java | 13 ++-- .../message/TransferJobRequestHandler.java | 13 +--- .../model/CarbonMeasureRequest.java | 10 --- .../model/CarbonMeasureResponse.java | 9 --- .../credential/AccountEndpointCredential.java | 9 +-- .../service/CarbonJobMeasure.java | 73 +++++++++++++++++++ .../service/HazelcastConsumer.java | 66 +++++++++++++++++ .../InfluxIOService.java | 2 +- .../{listner => }/JobCompletionListener.java | 5 +- .../service/JobControl.java | 2 - .../service/JobParamService.java | 43 +++-------- .../service/MetricsCollector.java | 1 - .../utility/ODSUtility.java | 43 +++++++++-- src/main/resources/application.properties | 8 ++ 19 files changed, 228 insertions(+), 231 deletions(-) delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java rename src/main/java/org/onedatashare/transferservice/odstransferservice/service/{DatabaseService => 
}/InfluxIOService.java (99%) rename src/main/java/org/onedatashare/transferservice/odstransferservice/service/{listner => }/JobCompletionListener.java (90%) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java index eff4e1d3..500c8c52 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -3,16 +3,20 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.hazelcast.client.HazelcastClient; import com.hazelcast.client.config.ClientConfig; +import com.hazelcast.collection.IQueue; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.HazelcastJsonValue; import com.hazelcast.map.IMap; import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationLifeCycleListener; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; import org.springframework.core.env.Environment; +import java.util.UUID; + @Configuration public class HazelcastClientConfig { @@ -24,6 +28,9 @@ public HazelcastClientConfig(Environment environment, ObjectMapper objectMapper) this.objectMapper = objectMapper; } + @Value("spring.application.name") + private String appName; + @Bean @Qualifier("clientConfig") @Profile("local") @@ -53,17 +60,22 @@ public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { } @Bean - public IMap fileTransferNodeRegistrationMap(HazelcastInstance hazelcastInstance) { + public IMap fileTransferNodeRegistrationMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("file-transfer-node-map"); } @Bean - public IMap fileTransferSchedule(HazelcastInstance hazelcastInstance) { + public IMap fileTransferScheduleMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("file-transfer-schedule-map"); } @Bean - public IMap carbonIntensityMap(HazelcastInstance hazelcastInstance) { + public IMap carbonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("carbon-intensity-map"); } + + @Bean + public IQueue messageQueue(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getQueue(appName); + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java index bc56cb0b..be31f3d2 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java @@ -2,15 +2,7 @@ import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; -import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import 
org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.scheduling.annotation.Async; import org.springframework.web.bind.annotation.RequestBody; @@ -26,19 +18,10 @@ @RequestMapping("/api/v1/transfer") public class TransferController { - Logger logger = LoggerFactory.getLogger(TransferController.class); - JobControl jobControl; - JobLauncher jobLauncher; - - JobParamService jobParamService; - - public TransferController(JobControl jobControl, JobLauncher jobLauncher, JobParamService jobParamService) { + public TransferController(JobControl jobControl) { this.jobControl = jobControl; - this.jobLauncher = jobLauncher; - this.jobParamService = jobParamService; - } @RequestMapping(value = "/start", method = RequestMethod.POST) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java deleted file mode 100644 index d0f5aa2a..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonAvgRequestHandler.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.message; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer; -import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; -import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureResponse; -import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; -import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.amqp.core.Message; -import org.springframework.amqp.core.MessageBuilder; -import org.springframework.amqp.core.MessagePostProcessor; -import org.springframework.amqp.rabbit.core.RabbitTemplate; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.MediaType; -import org.springframework.stereotype.Service; - -import java.io.IOException; - -@Service -public class CarbonAvgRequestHandler implements MessageHandler { - - private final ObjectMapper objectMapper; - private final PmeterParser pmeterParser; - private final RabbitTemplate rabbitTemplate; - - @Value("${spring.application.name}") - String applicationName; - - - Logger logger = LoggerFactory.getLogger(CarbonAvgRequestHandler.class); - - public CarbonAvgRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pmeterParser, RabbitTemplate rabbitTemplate) { - this.objectMapper = messageObjectMapper; - this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); - this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); - this.pmeterParser = pmeterParser; - this.rabbitTemplate = rabbitTemplate; - } - - @Override - public void messageHandler(Message message) throws IOException { - String jsonStr = new String(message.getBody()); - CarbonMeasureRequest 
carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); - logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); - CarbonScore sourceCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.sourceIp); - CarbonScore destCarbonScore = this.pmeterParser.carbonAverageTraceRoute(carbonMeasureRequest.destinationIp); - double average = (double) (sourceCarbonScore.getAvgCarbon() + destCarbonScore.getAvgCarbon()) / 2; - CarbonMeasureResponse resp = new CarbonMeasureResponse(); - resp.transferNodeName = this.applicationName; - resp.averageCarbonIntensity = average; - logger.info("Response: CarbonMeasureResponse: {}", resp); - String jsonResp = this.objectMapper.writeValueAsString(resp); - MessagePostProcessor messagePostProcessor = RabbitMQConsumer.embedMessageType(message.getMessageProperties().getCorrelationId()); - Message msg = MessageBuilder.withBody(jsonResp.getBytes()) - .setContentType(MediaType.APPLICATION_JSON_VALUE) - .build(); - this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor); - - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java deleted file mode 100644 index 87e80c17..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/CarbonIpRequestHandler.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.message; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.onedatashare.transferservice.odstransferservice.consumer.RabbitMQConsumer; -import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; -import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasureRequest; -import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.amqp.core.Message; -import org.springframework.amqp.core.MessageBuilder; -import org.springframework.amqp.core.MessagePostProcessor; -import org.springframework.amqp.rabbit.core.RabbitTemplate; -import org.springframework.http.MediaType; -import org.springframework.stereotype.Service; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@Service -public class CarbonIpRequestHandler implements MessageHandler { - - private final ObjectMapper objectMapper; - private final PmeterParser pmeterParser; - private final RabbitTemplate rabbitTemplate; - - Logger logger = LoggerFactory.getLogger(CarbonIpRequestHandler.class); - - public CarbonIpRequestHandler(ObjectMapper messageObjectMapper, PmeterParser pmeterParser, RabbitTemplate rabbitTemplate) { - this.objectMapper = messageObjectMapper; - this.pmeterParser = pmeterParser; - this.rabbitTemplate = rabbitTemplate; - - } - - @Override - public void messageHandler(Message message) throws IOException { - String jsonStr = new String(message.getBody()); - CarbonMeasureRequest carbonMeasureRequest = objectMapper.readValue(jsonStr, CarbonMeasureRequest.class); - logger.info("Received CarbonMeasureRequest: {}", carbonMeasureRequest); - List sourceTraceRouteCarbon = this.pmeterParser.carbonPerIp(carbonMeasureRequest.sourceIp); - List destinationTraceRouteCarbon = 
this.pmeterParser.carbonPerIp(carbonMeasureRequest.destinationIp); - sourceTraceRouteCarbon.addAll(destinationTraceRouteCarbon); - String jsonResp = this.objectMapper.writeValueAsString(sourceTraceRouteCarbon); - MessagePostProcessor messagePostProcessor = RabbitMQConsumer.embedMessageType(message.getMessageProperties().getCorrelationId()); - Message msg = MessageBuilder.withBody(jsonResp.getBytes()) - .setContentType(MediaType.APPLICATION_JSON_VALUE) - .build(); - logger.info("Sending reply too: {}", message.getMessageProperties().getReplyTo()); - this.rabbitTemplate.convertAndSend(message.getMessageProperties().getReplyTo(), msg, messagePostProcessor); - - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java index 210bc944..29dd65f6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java @@ -1,10 +1,9 @@ package org.onedatashare.transferservice.odstransferservice.message; -import com.fasterxml.jackson.core.JsonProcessingException; -import org.springframework.amqp.core.Message; +import com.hazelcast.core.HazelcastJsonValue; import java.io.IOException; public interface MessageHandler { - void messageHandler(Message message) throws IOException; + void messageHandler(HazelcastJsonValue jsonMsg) throws IOException; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java index ccb64b62..95a2f534 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -1,30 +1,29 @@ package org.onedatashare.transferservice.odstransferservice.message; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; import org.onedatashare.transferservice.odstransferservice.model.TransferApplicationParams; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.amqp.core.Message; import org.springframework.stereotype.Service; -import java.io.IOException; - @Service -public class TransferApplicationParamHandler implements MessageHandler{ +public class TransferApplicationParamHandler implements MessageHandler { private final ObjectMapper mesageObjectMapper; private final ThreadPoolContract threadPool; Logger logger = LoggerFactory.getLogger(TransferApplicationParamHandler.class); - public TransferApplicationParamHandler(ObjectMapper messageObjectMapper, ThreadPoolContract threadPool){ + public TransferApplicationParamHandler(ObjectMapper messageObjectMapper, ThreadPoolContract threadPool) { this.mesageObjectMapper = messageObjectMapper; this.threadPool = threadPool; } @Override - public void messageHandler(Message message) throws IOException { - String jsonStr = new String(message.getBody()); + public void messageHandler(HazelcastJsonValue jsonMsg) throws JsonProcessingException { + String jsonStr = jsonMsg.getValue(); TransferApplicationParams 
params = mesageObjectMapper.readValue(jsonStr, TransferApplicationParams.class); logger.info("Parsed TransferApplicationParams: {}", params); this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index ed0db7da..57069502 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -1,21 +1,16 @@ package org.onedatashare.transferservice.odstransferservice.message; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; -import org.onedatashare.transferservice.odstransferservice.service.JobParamService; import org.onedatashare.transferservice.odstransferservice.service.expanders.ExpanderFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.amqp.core.Message; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; -import java.io.IOException; import java.util.List; @Service @@ -35,8 +30,8 @@ public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobControl jo } @Override - public void messageHandler(Message message) throws IOException { - String jsonStr = new String(message.getBody()); + public void messageHandler(HazelcastJsonValue jsonMessage) throws JsonProcessingException { + String jsonStr = jsonMessage.getValue(); TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); logger.info("Job Received: {}", request.toString()); List fileInfo = expanderFactory.getExpander(request.getSource()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java deleted file mode 100644 index dbf9f59e..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureRequest.java +++ /dev/null @@ -1,10 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Data; - -@Data -public class CarbonMeasureRequest { - public String transferNodeName; - public String sourceIp; - public String destinationIp; -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java deleted file mode 100644 index 5f260990..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasureResponse.java +++ /dev/null @@ -1,9 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Data; - -@Data -public class CarbonMeasureResponse { 
- public String transferNodeName; - public Double averageCarbonIntensity; -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java index c816c33c..1777e80b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java @@ -8,7 +8,7 @@ @Data @JsonIgnoreProperties(ignoreUnknown = true) -public class AccountEndpointCredential extends EndpointCredential{ +public class AccountEndpointCredential extends EndpointCredential { private String uri; //the hostname and port to reach the server private String username; //this should be the username for the client @ToString.Exclude @@ -18,12 +18,11 @@ public class AccountEndpointCredential extends EndpointCredential{ public static String[] uriFormat(AccountEndpointCredential credential, EndpointType type) { String noTypeUri = ""; - if(type.equals(EndpointType.sftp)){ + if (type.equals(EndpointType.sftp)) { noTypeUri = credential.getUri().replaceFirst("sftp://", ""); - }else if(type.equals(EndpointType.ftp)){ + } else if (type.equals(EndpointType.ftp)) { noTypeUri = credential.getUri().replaceFirst("ftp://", ""); - } - else{ + } else { noTypeUri = credential.getUri().replaceFirst("http://", ""); } return noTypeUri.split(":"); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java new file mode 100644 index 00000000..97dfa097 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -0,0 +1,73 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import com.hazelcast.query.PredicateBuilder; +import com.hazelcast.query.Predicates; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.UUID; + +@Service +public class CarbonJobMeasure { + + private final IMap carbonIntensityMap; + private final IMap fileTransferScheduleMap; + private final PredicateBuilder.EntryObject entryObj; + private final PmeterParser pmeterParser; + private final ObjectMapper objectMapper; + private final Logger logger = LoggerFactory.getLogger(CarbonJobMeasure.class); + + @Value("spring.application.name") + private String appName; + + public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { + this.carbonIntensityMap = carbonIntensityMap; + this.fileTransferScheduleMap = fileTransferScheduleMap; + this.entryObj = 
Predicates.newPredicateBuilder().getEntryObject(); + this.pmeterParser = pmeterParser; + this.objectMapper = objectMapper; + } + + @Scheduled(cron = "0 0/10 * * * ?") + public void measureCarbonOfPotentialJobs() { + Collection scheduledJobsJson = this.fileTransferScheduleMap.values(this.entryObj.get("options.transferNodeName").equal(this.appName)); + scheduledJobsJson.forEach(hazelcastJsonValue -> { + try { + TransferJobRequest transferJobRequest = this.objectMapper.readValue(hazelcastJsonValue.getValue(), TransferJobRequest.class); + String sourceIp = ""; + if (transferJobRequest.getSource().getVfsSourceCredential() != null) { + sourceIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getSource().getVfsSourceCredential(), transferJobRequest.getSource().getType()); + } else { + sourceIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getSource().getOauthSourceCredential(), transferJobRequest.getSource().getType()); + } + String destIp = ""; + if (transferJobRequest.getDestination().getVfsDestCredential() != null) { + destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getVfsDestCredential(), transferJobRequest.getDestination().getType()); + } else { + destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getOauthDestCredential(), transferJobRequest.getDestination().getType()); + } + List sourceCarbonPerIp = this.pmeterParser.carbonPerIp(sourceIp); + sourceCarbonPerIp.addAll(this.pmeterParser.carbonPerIp(destIp)); + this.carbonIntensityMap.put(transferJobRequest.getOwnerId() + "-" + transferJobRequest.getTransferNodeName() + "-" + transferJobRequest.getJobUuid().toString(), new HazelcastJsonValue(this.objectMapper.writeValueAsString(sourceCarbonPerIp))); + } catch (JsonProcessingException e) { + logger.error("Failed to parse job: {} \n Error received: \t {}", hazelcastJsonValue.getValue(), e.getMessage()); + } catch (IOException e) { + logger.error("Failed to measure ip: {} \n Error received: \t {}", hazelcastJsonValue.getValue(), e.getMessage()); + } + }); + } + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java new file mode 100644 index 00000000..4a7d185d --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -0,0 +1,66 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.hazelcast.collection.IQueue; +import com.hazelcast.core.HazelcastJsonValue; +import jakarta.annotation.PostConstruct; +import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; +import org.onedatashare.transferservice.odstransferservice.message.TransferApplicationParamHandler; +import org.onedatashare.transferservice.odstransferservice.message.TransferJobRequestHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +@Service +public class HazelcastConsumer implements Runnable { + + private final IQueue messageQueue; + private final ObjectMapper objectMapper; + private final TransferJobRequestHandler transferJobRequestHandler; + private final TransferApplicationParamHandler 
transferParamApplicationHandler; + private final Logger logger; + private Thread consumerThread; + + public HazelcastConsumer(IQueue messageQueue, ObjectMapper objectMapper, TransferJobRequestHandler transferJobRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler) { + this.messageQueue = messageQueue; + this.transferJobRequestHandler = transferJobRequestHandler; + this.objectMapper = objectMapper; + this.transferParamApplicationHandler = transferApplicationParamHandler; + this.logger = LoggerFactory.getLogger(HazelcastConsumer.class); + this.consumerThread = new Thread(this); + } + + @PostConstruct + public void init() { + this.consumerThread.start(); + } + + + @Override + public void run() { + while (true) { + try { + HazelcastJsonValue jsonMsg = this.messageQueue.take(); + JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); + String type = ((ObjectNode) jsonNode).get("type").toString(); + ((ObjectNode) jsonNode).remove("type"); + HazelcastJsonValue properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); + logger.info("Received message: {}", properJsonMsg); + switch (MessageType.valueOf(type)) { + case TRANSFER_JOB_REQUEST: + this.transferJobRequestHandler.messageHandler(properJsonMsg); + break; + + case APPLICATION_PARAM_CHANGE: + this.transferParamApplicationHandler.messageHandler(properJsonMsg); + } + } catch (InterruptedException | JsonProcessingException e) { + logger.error(e.getMessage()); + } + + } + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java similarity index 99% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java index 3f0e8bea..1d0adb93 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.service.DatabaseService; +package org.onedatashare.transferservice.odstransferservice.service; import com.influxdb.client.InfluxDBClient; import com.influxdb.client.WriteApi; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java similarity index 90% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java index 2fd01f38..4312fbc9 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java @@ -1,10 +1,7 @@ -package org.onedatashare.transferservice.odstransferservice.service.listner; +package org.onedatashare.transferservice.odstransferservice.service; import com.fasterxml.jackson.core.JsonProcessingException; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; -import 
org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; -import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationService; -import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index ab4de771..cf62267e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -6,8 +6,6 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; -import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.listner.JobCompletionListener; import org.onedatashare.transferservice.odstransferservice.service.step.ReaderWriterFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java index ebde47dc..3cfaf054 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java @@ -6,12 +6,11 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; -import org.onedatashare.transferservice.odstransferservice.utility.S3Utility; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; @@ -28,8 +27,12 @@ public class JobParamService { @Value("${spring.application.name}") private String appName; - @Autowired PmeterParser pmeterParser; + + public JobParamService(PmeterParser pmeterParser) { + this.pmeterParser = pmeterParser; + } + /** * Here we are adding basically the whole request except for sensitive credentials to the Job Params table. * B/C we do not add @@ -76,54 +79,30 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest //adding the source host and source port to use for RTT & Latency measurements. 
if (request.getSource().getVfsSourceCredential() != null) { - String sourceIp = this.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType); + String sourceIp = ODSUtility.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType); builder.addString(SOURCE_HOST, sourceIp); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(sourceIp); logger.info("Source Carbon Score: {}", score.avgCarbon); builder.addLong(CARBON_SCORE_SOURCE, (long) score.avgCarbon); } else if (request.getSource().getOauthSourceCredential() != null) { - builder.addString(SOURCE_HOST, this.uriFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); + builder.addString(SOURCE_HOST, ODSUtility.uriFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); } if (request.getDestination().getVfsDestCredential() != null) { - String destIp = this.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType); + String destIp = ODSUtility.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType); builder.addString(DEST_HOST, destIp); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(destIp); logger.info("Destination Carbon Score: {}", score.avgCarbon); - builder.addLong(CARBON_SCORE_DEST, (long)score.avgCarbon); + builder.addLong(CARBON_SCORE_DEST, (long) score.avgCarbon); } else if (request.getDestination().getOauthDestCredential() != null) { - builder.addString(DEST_HOST, this.uriFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); + builder.addString(DEST_HOST, ODSUtility.uriFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); } return builder.toJobParameters(); } - public String uriFromEndpointCredential(EndpointCredential credential, EndpointType type) { - AccountEndpointCredential ac; - switch (type) { - case ftp: - case sftp: - case scp: - case http: - ac = (AccountEndpointCredential) credential; - URI uri = URI.create(ac.getUri()); - return uri.getHost(); - case s3: - ac = (AccountEndpointCredential) credential; - URI s3Uri = URI.create(S3Utility.constructS3URI(ac.getUri(), "")); - return s3Uri.getHost(); - case box: - return "box.com"; - case dropbox: - return "dropbox.com"; - case gdrive: - return "drive.google.com"; - default: - return ""; - } - } public int portFromEndpointCredential(EndpointCredential credential, EndpointType type) { switch (type) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java index f722b67a..e330f9b9 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java @@ -6,7 +6,6 @@ import 
org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.JobMetric; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; -import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java index 803158f5..13673822 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java @@ -10,10 +10,13 @@ import com.google.api.services.drive.model.FileList; import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.net.URI; import java.security.GeneralSecurityException; import java.util.Arrays; import java.util.HashSet; @@ -22,10 +25,10 @@ public class ODSUtility { private static String odsClientID = "OneDataShare-DIDCLab"; -// @Value("${gdrive.client.id}") + // @Value("${gdrive.client.id}") private static String gDriveClientId = System.getenv("ODS_GDRIVE_CLIENT_ID"); -// @Value("${gdrive.client.secret}") + // @Value("${gdrive.client.secret}") private static String gDriveClientSecret = System.getenv("ODS_GDRIVE_CLIENT_SECRET"); // @Value("${gdrive.appname}") @@ -42,6 +45,7 @@ public static DataChunk makeChunk(int size, byte[] data, int startPosition, int dataChunk.setSize(size); return dataChunk; } + public static DataChunk makeChunk(long size, byte[] data, long startPosition, int chunkIdx, String fileName) { DataChunk dataChunk = new DataChunk(); dataChunk.setStartPosition(startPosition); @@ -71,15 +75,15 @@ public static File gdriveMakeDir(String basePath, Drive client) throws IOExcepti .setFields("nextPageToken, files(id,name)") .setSpaces("drive"); FileList files = request.execute(); - for(File file : files.getFiles()){ - if(file.getId().equals(basePath)){ + for (File file : files.getFiles()) { + if (file.getId().equals(basePath)) { return file; } } File fileMetadata = new File(); - File ret= new File(); + File ret = new File(); String[] path = basePath.split("/"); - for(String mini: path){ + for (String mini : path) { fileMetadata.setName(mini); fileMetadata.setMimeType("application/vnd.google-apps.folder"); ret = client.files().create(fileMetadata) @@ -98,7 +102,7 @@ public static UploadPartRequest makePartRequest(DataChunk dataChunk, String buck uploadPartRequest.setUploadId(uploadId); uploadPartRequest.setKey(key); // uploadPartRequest.setFileOffset(dataChunk.getStartPosition()); - uploadPartRequest.setPartNumber(dataChunk.getChunkIdx()+1); //by default we start from chunks 0-N but AWS SDK must have 1-10000 so we just add 1 + uploadPartRequest.setPartNumber(dataChunk.getChunkIdx() + 1); //by default we start from chunks 0-N but AWS SDK must have 
1-10000 so we just add 1
         uploadPartRequest.setPartSize(dataChunk.getSize());
         return uploadPartRequest;
     }
@@ -106,4 +110,29 @@ public static UploadPartRequest makePartRequest(DataChunk dataChunk, String buck
     public static final EndpointType[] SEEKABLE_PROTOCOLS = new EndpointType[]{EndpointType.s3, EndpointType.vfs, EndpointType.http, EndpointType.box};
     public static final HashSet fullyOptimizableProtocols = new HashSet(Arrays.asList(SEEKABLE_PROTOCOLS));
+
+    public static String uriFromEndpointCredential(EndpointCredential credential, EndpointType type) {
+        AccountEndpointCredential ac;
+        switch (type) {
+            case ftp:
+            case sftp:
+            case scp:
+            case http:
+                ac = (AccountEndpointCredential) credential;
+                URI uri = URI.create(ac.getUri());
+                return uri.getHost();
+            case s3:
+                ac = (AccountEndpointCredential) credential;
+                URI s3Uri = URI.create(S3Utility.constructS3URI(ac.getUri(), ""));
+                return s3Uri.getHost();
+            case box:
+                return "box.com";
+            case dropbox:
+                return "dropbox.com";
+            case gdrive:
+                return "drive.google.com";
+            default:
+                return "";
+        }
+    }
 }
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index fb62b1a1..45fa63aa 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -7,6 +7,14 @@ spring.main.allow-bean-definition-overriding=true
 gdrive.client.id=${ODS_GDRIVE_CLIENT_ID}
 gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET}
 
+#Eureka config
+#eureka.client.enabled=true
+#eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka
+#eureka.client.healthcheck.enabled=true
+#eureka.client.registry-fetch-interval-seconds=5
+#eureka.instance.leaseRenewalIntervalInSeconds=10
+#eureka.instance.metadata-map.startup=${random.int}
+
 #SBA
 management.endpoints.web.exposure.include=*

From 1ea8223db1da874c7d7105a96ca1e6c5d260982a Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 24 Sep 2024 11:52:41 -0400
Subject: [PATCH 127/150] Alright, everything is all set for the RabbitMQ work, which I'll deprecate in a month, using Hazelcast instead

---
 .../odstransferservice/config/BatchConfig.java        | 7 +++++--
 .../odstransferservice/consumer/RabbitMQConsumer.java | 2 --
 .../message/TransferJobRequestHandler.java            | 8 ++++----
 3 files changed, 9 insertions(+), 8 deletions(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
index 0171e420..cbba991c 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java
@@ -36,10 +36,13 @@ public PlatformTransactionManager transactionManager(DataSource dataSource) {
     }
 
     @Bean
-    public JobLauncher jobLauncher(JobRepository jobRepository) {
+    public JobLauncher asyncJobLauncher(JobRepository jobRepository) {
         TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher();
         jobLauncher.setJobRepository(jobRepository);
-        jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
+        SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor();
+        taskExecutor.setConcurrencyLimit(4); // Adjust the limit based on the desired parallelism
+        taskExecutor.setThreadNamePrefix("BatchJobExecutor-");
+        jobLauncher.setTaskExecutor(taskExecutor);
         return jobLauncher;
     }
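A note on the bounded launcher above: SimpleAsyncTaskExecutor's concurrency throttle blocks the submitting thread once four jobs are in flight, so a fifth run() waits for a free slot instead of spawning yet another thread. A minimal sketch of the caller's side (job, jobParameters and logger are placeholder names; Spring Batch 5 API):

    // Hands the job to the bounded executor and returns an in-flight
    // JobExecution (typically STARTING/STARTED) without waiting for completion.
    JobExecution execution = asyncJobLauncher.run(job, jobParameters);
    logger.info("job {} accepted with status {}", execution.getJobId(), execution.getStatus());

diff --git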
a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java index 13b7ad6d..c667f24c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java @@ -68,8 +68,6 @@ public void consumeDefaultMessage(Message message) { } catch (IOException e) { e.printStackTrace(); } - -// channel.basicAck(tag, false); } public static MessagePostProcessor embedMessageType(String correlationId) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index 039c0ce9..b8f30cc6 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -23,17 +23,17 @@ public class TransferJobRequestHandler implements MessageHandler { private final ObjectMapper objectMapper; private final JobParamService jobParamService; - private final JobLauncher jobLauncher; + private final JobLauncher asyncJobLauncher; private final JobControl jobControl; private final ExpanderFactory expanderFactory; Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); - public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher jobLauncher, JobControl jobControl, ExpanderFactory expanderFactory) { + public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jobControl, ExpanderFactory expanderFactory) { this.objectMapper = messageObjectMapper; this.jobParamService = jobParamService; - this.jobLauncher = jobLauncher; + this.asyncJobLauncher = asyncJobLauncher; this.jobControl = jobControl; this.expanderFactory = expanderFactory; } @@ -47,7 +47,7 @@ public void messageHandler(Message message) throws IOException { request.getSource().setInfoList(fileInfo); JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); try { - jobLauncher.run(jobControl.concurrentJobDefinition(request), parameters); + asyncJobLauncher.run(jobControl.concurrentJobDefinition(request), parameters); } catch (Exception e) { logger.error(e.getMessage()); } From 35e43f89219482181544e7d6acddf1c07f1efc97 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 28 Oct 2024 08:09:39 -0400 Subject: [PATCH 128/150] fully removed rabbitmq and now using hazelcast, more changes coming --- pom.xml | 42 +++-- .../config/BatchConfig.java | 4 +- .../config/HazelcastClientConfig.java | 45 ++++-- .../TransferApplicationParamHandler.java | 1 - .../model/CarbonIntensityMapKey.java | 18 +++ .../odstransferservice/model/FileBuffer.java | 34 ---- .../model/SmallFileUpload.java | 4 + .../service/CarbonJobMeasure.java | 41 ++++- ...sferNodeRegistrationLifeCycleListener.java | 2 +- .../FileTransferNodeRegistrationService.java | 4 +- .../service/JobCompletionListener.java | 5 +- .../service/PmeterParser.java | 66 ++++++-- .../service/VaultSSLService.java | 153 ++++++++++++++++++ .../service/step/AmazonS3/AmazonS3Reader.java | 3 +- .../service/step/DynamicStep.java | 28 ---- .../service/step/Processor.java 
| 19 --- .../odstransferservice/utility/S3Utility.java | 28 ---- src/main/resources/application.properties | 8 +- .../service/CarbonJobMeasureTest.java | 69 ++++++++ 19 files changed, 408 insertions(+), 166 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java create mode 100644 src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java diff --git a/pom.xml b/pom.xml index 5392a70a..1413894c 100644 --- a/pom.xml +++ b/pom.xml @@ -36,6 +36,17 @@ false + + private-repository + Hazelcast Private Repository + https://repository.hazelcast.com/release/ + + true + + + false + + @@ -44,14 +55,19 @@ com.hazelcast - hazelcast - 5.5.0 + hazelcast-enterprise + 5.5.1 org.springframework.cloud spring-cloud-starter-vault-config + + org.springframework.vault + spring-vault-core + 3.1.2 + org.springframework.cloud spring-cloud-starter-bootstrap @@ -149,17 +165,17 @@ org.springframework.boot spring-boot-starter-web - - - - - - - - - - - + + + + + + + + + + + org.springframework.cloud spring-cloud-starter-netflix-eureka-client diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 3086d30e..436ddf52 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -26,7 +26,9 @@ public PlatformTransactionManager transactionManager(DataSource dataSource) { public JobLauncher jobLauncher(JobRepository jobRepository) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(jobRepository); - jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); + SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + taskExecutor.setConcurrencyLimit(1); + jobLauncher.setTaskExecutor(taskExecutor); return jobLauncher; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java index 500c8c52..45e04a22 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -4,57 +4,82 @@ import com.hazelcast.client.HazelcastClient; import com.hazelcast.client.config.ClientConfig; import com.hazelcast.collection.IQueue; +import com.hazelcast.config.SSLConfig; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.HazelcastJsonValue; import com.hazelcast.map.IMap; import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationLifeCycleListener; +import 
org.onedatashare.transferservice.odstransferservice.service.VaultSSLService; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; import org.springframework.core.env.Environment; +import org.springframework.vault.core.VaultTemplate; +import java.util.Properties; import java.util.UUID; @Configuration public class HazelcastClientConfig { - private final Environment environment; + private final Environment env; private final ObjectMapper objectMapper; + private final VaultSSLService vaultSslService; - public HazelcastClientConfig(Environment environment, ObjectMapper objectMapper) { - this.environment = environment; + public HazelcastClientConfig(Environment environment, ObjectMapper objectMapper, VaultTemplate vaultTemplate, VaultSSLService vaultSSLService) { + this.env = environment; this.objectMapper = objectMapper; + this.vaultSslService = vaultSSLService; } - @Value("spring.application.name") + @Value("${spring.application.name}") private String appName; @Bean @Qualifier("clientConfig") @Profile("local") - public ClientConfig devClientConfig() { + public ClientConfig devClientConfig(SSLConfig sslConfig) { ClientConfig clientConfig = new ClientConfig(); clientConfig.setClusterName("dev-scheduler-cluster"); - clientConfig.getNetworkConfig().addAddress("127.0.0.1"); + clientConfig.getNetworkConfig().setSSLConfig(sslConfig); + clientConfig.setInstanceName(this.appName); return clientConfig; } @Bean @Qualifier("clientConfig") @Profile({"prod", "eks", "ec2",}) - public ClientConfig prodClientConfig() { + public ClientConfig prodClientConfig(SSLConfig sslConfig) { ClientConfig clientConfig = new ClientConfig(); clientConfig.setClusterName("prod-scheduler-cluster"); - clientConfig.getNetworkConfig().getEurekaConfig().setEnabled(true); + clientConfig.getNetworkConfig().setSSLConfig(sslConfig); return clientConfig; } + @Bean + public SSLConfig sslConfig() { + Properties properties = new Properties(); + properties.setProperty("protocol", "TLSv1.2"); + properties.setProperty("mutualAuthentication", "OPTIONAL"); + properties.setProperty("trustStore", this.vaultSslService.getStorePath().toAbsolutePath().toString()); + properties.setProperty("trustStorePassword", env.getProperty("hz.keystore.password", "changeit")); + properties.setProperty("trustStoreType", "PKCS12"); + properties.setProperty("keyMaterialDuration", this.vaultSslService.getStoreDuration().toString()); + properties.setProperty("validateIdentity", "false"); + + SSLConfig sslConfig = new SSLConfig(); + sslConfig.setEnabled(true); + sslConfig.setProperties(properties); +// sslConfig.setFactoryImplementation(this.vaultSslService); + return sslConfig; + } + @Bean public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { HazelcastInstance hazelcastInstance = HazelcastClient.newHazelcastClient(clientConfig); - FileTransferNodeRegistrationLifeCycleListener fileTransferNodeRegistrationEventListener = new FileTransferNodeRegistrationLifeCycleListener(hazelcastInstance, environment, objectMapper); + FileTransferNodeRegistrationLifeCycleListener fileTransferNodeRegistrationEventListener = new FileTransferNodeRegistrationLifeCycleListener(hazelcastInstance, env, objectMapper); hazelcastInstance.getLifecycleService().addLifecycleListener(fileTransferNodeRegistrationEventListener); return 
hazelcastInstance; } @@ -70,7 +95,7 @@ public IMap fileTransferScheduleMap(@Qualifier("hazelc } @Bean - public IMap carbonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + public IMap carbsonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("carbon-intensity-map"); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java index 95a2f534..0efbf52b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -27,6 +27,5 @@ public void messageHandler(HazelcastJsonValue jsonMsg) throws JsonProcessingExce TransferApplicationParams params = mesageObjectMapper.readValue(jsonStr, TransferApplicationParams.class); logger.info("Parsed TransferApplicationParams: {}", params); this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); - } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java new file mode 100644 index 00000000..40038a2e --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java @@ -0,0 +1,18 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import java.util.UUID; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class CarbonIntensityMapKey { + String ownerId; + String transferNodeName; + UUID jobUuid; + LocalDateTime timeMeasuredAt; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java deleted file mode 100644 index d68d459d..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.PriorityQueue; - -public class FileBuffer { - private PriorityQueue dataChunkPriorityQueue; - - public FileBuffer(){ - this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); - } - public void addChunk(DataChunk chunk){ - this.dataChunkPriorityQueue.add(chunk); - } - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - public InputStream condenseListToOneStream(long size){ - byte[] data = new byte[Long.valueOf(size).intValue()]; - ByteBuffer buffer = ByteBuffer.wrap(data); - for(DataChunk chunk : this.dataChunkPriorityQueue){ - buffer.put(chunk.getData()); - } - return new ByteArrayInputStream(buffer.array()); - } - - public void clear(){ - this.dataChunkPriorityQueue.clear(); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java 
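The FileBuffer deleted above and the SmallFileUpload that replaces it share one pattern: park DataChunks that can arrive out of order in a priority queue keyed on chunk index, then splice them into a single InputStream for a one-shot upload. Worth noting: iterating a PriorityQueue with for-each, as the deleted condenseListToOneStream did, walks heap order rather than sorted order; only poll() drains in index order. A self-contained sketch of the poll-based pattern, where Chunk is a simplified stand-in for the project's DataChunk:

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.util.Comparator;
    import java.util.PriorityQueue;

    class ChunkBuffer {
        // Simplified stand-in for DataChunk: an index plus its payload.
        record Chunk(int idx, byte[] data) {}

        private final PriorityQueue<Chunk> queue =
                new PriorityQueue<>(Comparator.comparingInt(Chunk::idx));

        void add(Chunk c) { queue.add(c); }

        // Draining with poll() is what guarantees ascending index order, so the
        // assembled stream matches the original file layout.
        InputStream toStream(int totalSize) {
            byte[] out = new byte[totalSize];
            int pos = 0;
            while (!queue.isEmpty()) {
                Chunk c = queue.poll();
                System.arraycopy(c.data(), 0, out, pos, c.data().length);
                pos += c.data().length;
            }
            return new ByteArrayInputStream(out);
        }
    }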
index 41b3f893..fe16efdb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java @@ -35,4 +35,8 @@ public InputStream condenseListToOneStream(){ return new ByteArrayInputStream(combinedBytes); } + + public void clearBuffer(){ + this.dataChunkPriorityQueue.clear(); + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index 97dfa097..141b2585 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -4,8 +4,10 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.hazelcast.core.HazelcastJsonValue; import com.hazelcast.map.IMap; +import com.hazelcast.query.Predicate; import com.hazelcast.query.PredicateBuilder; import com.hazelcast.query.Predicates; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIntensityMapKey; import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; @@ -16,14 +18,16 @@ import org.springframework.stereotype.Service; import java.io.IOException; +import java.time.LocalDateTime; import java.util.Collection; import java.util.List; import java.util.UUID; +import java.util.stream.Collectors; @Service public class CarbonJobMeasure { - private final IMap carbonIntensityMap; + private final IMap carbonIntensityMap; private final IMap fileTransferScheduleMap; private final PredicateBuilder.EntryObject entryObj; private final PmeterParser pmeterParser; @@ -33,7 +37,10 @@ public class CarbonJobMeasure { @Value("spring.application.name") private String appName; - public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { + @Value("ods.user") + private String odsUser; + + public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { this.carbonIntensityMap = carbonIntensityMap; this.fileTransferScheduleMap = fileTransferScheduleMap; this.entryObj = Predicates.newPredicateBuilder().getEntryObject(); @@ -41,12 +48,29 @@ public CarbonJobMeasure(IMap carbonIntensityMap, IMa this.objectMapper = objectMapper; } + public List getPotentialJobsFromMap() { + Predicate potentialJobs; + if (odsUser.equals("onedatashare")) { + potentialJobs = this.entryObj.get("options.transferNodeName").equal(this.appName).or(this.entryObj.get("options.transferNodeName").equal("")); + } else { + potentialJobs = this.entryObj.get("options.transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName)); + } + + Collection jsonJobs = this.fileTransferScheduleMap.values(potentialJobs); + return jsonJobs.stream().map(hazelcastJsonValue -> { + try { + return this.objectMapper.readValue(hazelcastJsonValue.getValue(), TransferJobRequest.class); + } catch (JsonProcessingException ignored) { + } + return null; + }).collect(Collectors.toList()); + } + @Scheduled(cron = "0 0/10 * * * ?") public void 
measureCarbonOfPotentialJobs() { - Collection scheduledJobsJson = this.fileTransferScheduleMap.values(this.entryObj.get("options.transferNodeName").equal(this.appName)); - scheduledJobsJson.forEach(hazelcastJsonValue -> { + List potentialJobs = getPotentialJobsFromMap(); + potentialJobs.forEach(transferJobRequest -> { try { - TransferJobRequest transferJobRequest = this.objectMapper.readValue(hazelcastJsonValue.getValue(), TransferJobRequest.class); String sourceIp = ""; if (transferJobRequest.getSource().getVfsSourceCredential() != null) { sourceIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getSource().getVfsSourceCredential(), transferJobRequest.getSource().getType()); @@ -61,11 +85,12 @@ public void measureCarbonOfPotentialJobs() { } List sourceCarbonPerIp = this.pmeterParser.carbonPerIp(sourceIp); sourceCarbonPerIp.addAll(this.pmeterParser.carbonPerIp(destIp)); - this.carbonIntensityMap.put(transferJobRequest.getOwnerId() + "-" + transferJobRequest.getTransferNodeName() + "-" + transferJobRequest.getJobUuid().toString(), new HazelcastJsonValue(this.objectMapper.writeValueAsString(sourceCarbonPerIp))); + CarbonIntensityMapKey mapKey = new CarbonIntensityMapKey(transferJobRequest.getOwnerId(), transferJobRequest.getTransferNodeName(), transferJobRequest.getJobUuid(), LocalDateTime.now()); + this.carbonIntensityMap.put(new HazelcastJsonValue(this.objectMapper.writeValueAsString(mapKey)), new HazelcastJsonValue(this.objectMapper.writeValueAsString(sourceCarbonPerIp))); } catch (JsonProcessingException e) { - logger.error("Failed to parse job: {} \n Error received: \t {}", hazelcastJsonValue.getValue(), e.getMessage()); + logger.error("Failed to parse job: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage()); } catch (IOException e) { - logger.error("Failed to measure ip: {} \n Error received: \t {}", hazelcastJsonValue.getValue(), e.getMessage()); + logger.error("Failed to measure ip: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage()); } }); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java index 5b2aaf2c..69fd4e21 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java @@ -60,7 +60,7 @@ public void stateChanged(LifecycleEvent event) { FileTransferNodeMetaData fileTransferNodeMetaData = this.objectMapper.readValue(jsonValue, FileTransferNodeMetaData.class); fileTransferNodeMetaData.setRunningJob(false); fileTransferNodeMetaData.setOnline(false); - logger.info("De-Registering client: {}",fileTransferNodeMetaData); + logger.info("De-Registering client: {}", fileTransferNodeMetaData); jsonValue = this.objectMapper.writeValueAsString(fileTransferNodeMetaData); this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(jsonValue)); } catch (JsonProcessingException e) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java index e1a80122..9040d059 100644 --- 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java @@ -37,7 +37,7 @@ public FileTransferNodeRegistrationService(HazelcastInstance hazelcastInstance, @SneakyThrows @PostConstruct - public void init(){ + public void init() { this.updateRegistrationInHazelcast(null); } @@ -49,7 +49,7 @@ public void updateRegistrationInHazelcast(JobExecution jobExecution) throws Json metaDataBuilder.jobUuid(new UUID(0, 0)); } else { metaDataBuilder.jobId(jobExecution.getJobId()); - metaDataBuilder.runningJob(true); + metaDataBuilder.runningJob(jobExecution.isRunning()); metaDataBuilder.jobUuid(UUID.fromString(jobExecution.getJobParameters().getString(ODSConstants.JOB_UUID))); } metaDataBuilder.online(true); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java index 4312fbc9..c3f21cde 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java @@ -24,15 +24,12 @@ public class JobCompletionListener implements JobExecutionListener { MetricsCollector metricsCollector; - boolean optimizerEnable; - @Autowired FileTransferNodeRegistrationService fileTransferNodeRegistrationService; public JobCompletionListener(MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { this.metricsCollector = metricsCollector; this.connectionBag = connectionBag; - this.optimizerEnable = false; this.threadPool = threadPool; this.jobIds = jobIds; } @@ -59,7 +56,7 @@ public void afterJob(JobExecution jobExecution) { this.threadPool.clearPools(); System.gc(); try { - this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(null); + this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(jobExecution); } catch (JsonProcessingException e) { logger.error("Failed to update status of FTN inside of Hazelcast for job end. 
Exception \n {}", e.getMessage()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 9000d483..0fdb231c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -19,13 +19,15 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.net.Inet6Address; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.NetworkInterface; +import java.nio.channels.SocketChannel; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; +import java.util.*; @Service public class PmeterParser { @@ -35,6 +37,7 @@ public class PmeterParser { private final PumpStreamHandler streamHandler; private final DefaultExecutor pmeterExecutor; private final ExecuteWatchdog watchDog; + private String pmeterNic; Logger logger = LoggerFactory.getLogger(PmeterParser.class); @@ -47,9 +50,6 @@ public class PmeterParser { @Value("${pmeter.report.path}") String pmeterMetricsPath; - @Value("${pmeter.interface}") - String pmeterNic; - @Value("${ods.user}") String odsUser; @@ -66,8 +66,9 @@ public class PmeterParser { private CommandLine cmdLine; @PostConstruct - public void init() { - this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); + public void init() throws IOException { + this.pmeterNic = this.discoverActiveNetworkInterface(); + this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); } public PmeterParser() { @@ -82,6 +83,7 @@ public PmeterParser() { this.pmeterMapper = new ObjectMapper(); this.pmeterMapper.registerModule(new JavaTimeModule()); this.pmeterMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); + } @@ -156,4 +158,50 @@ public List carbonPerIp(String ip) throws IOException { filePath.toFile().createNewFile(); return retList; } + + private String discoverActiveNetworkInterface() throws IOException { + // iterate over the network interfaces known to java + Enumeration interfaces = NetworkInterface.getNetworkInterfaces(); + for (NetworkInterface interface_ : Collections.list(interfaces)) { + // we shouldn't care about loopback addresses + if (interface_.isLoopback()) + continue; + + // if you don't expect the interface to be up you can skip this + // though it would question the usability of the rest of the code + if (!interface_.isUp()) + continue; + + // iterate over the addresses associated with the interface + Enumeration addresses = interface_.getInetAddresses(); + for (InetAddress address : Collections.list(addresses)) { + // look only for ipv4 addresses + logger.info(address.getHostAddress()); + if (address instanceof Inet6Address) + continue; + + // use a timeout big enough for your needs + if (!address.isReachable(3000)) + continue; + + // java 7's try-with-resources statement, so that + // we close the socket immediately after use + try (SocketChannel socket = SocketChannel.open()) { + // again, use a big enough timeout + 
socket.socket().setSoTimeout(3000); + + // bind the socket to your local interface + socket.bind(new InetSocketAddress(address, 8080)); + + // try to connect to *somewhere* + socket.connect(new InetSocketAddress("onedatashare.org", 80)); + } catch (IOException ex) { + continue; + } + logger.info("Interface used for Transfer-Service: {}", interface_.getDisplayName()); + return interface_.getDisplayName(); + } + } + return ""; + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java new file mode 100644 index 00000000..281c1f4a --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java @@ -0,0 +1,153 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.hazelcast.nio.ssl.SSLContextFactory; +import jakarta.annotation.PostConstruct; +import lombok.Getter; +import lombok.SneakyThrows; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Service; +import org.springframework.vault.core.VaultPkiOperations; +import org.springframework.vault.core.VaultTemplate; +import org.springframework.vault.support.VaultIssuerCertificateRequestResponse; + +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManager; +import javax.net.ssl.TrustManagerFactory; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.security.cert.CertificateExpiredException; +import java.security.cert.CertificateNotYetValidException; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Enumeration; +import java.util.Properties; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +@Service +public class VaultSSLService implements SSLContextFactory { + + private final VaultPkiOperations vaultPkiOperations; + @Getter + private final Path storePath; + private final Logger logger = LoggerFactory.getLogger(VaultSSLService.class); + private final String keyStorePassword; + private final ScheduledExecutorService scheduler; + + @Getter + public Duration storeDuration; + + SSLContext sslContext; + + + public VaultSSLService(Environment environment, VaultTemplate vaultTemplate) { + this.vaultPkiOperations = vaultTemplate.opsForPki(); + this.storePath = Paths.get(System.getProperty("user.home"), "onedatashare", "ftn", "store", "jobscheduler.truststore.p12"); + this.keyStorePassword = environment.getProperty("hz.keystore.password", "changeit"); + this.storeDuration = Duration.ofMinutes(1); + this.scheduler = Executors.newScheduledThreadPool(0, Thread.ofVirtual().factory()); + + } + + @PostConstruct + public void init() { + scheduler.scheduleAtFixedRate(() -> { + try { + logger.info("Running Certificate CRON"); + refreshCerts(); + } catch (Exception e) { + logger.error(e.getMessage()); + } + }, 0, Duration.ofMinutes(1).toSeconds(), java.util.concurrent.TimeUnit.SECONDS); + } + + public void refreshCerts() throws KeyStoreException, IOException, 
NoSuchAlgorithmException, KeyManagementException { + logger.info("Refreshing Certificates"); + KeyStore trustStore = this.readInTrustStore(); + boolean hasValidCerts = this.checkIfCertsAreStillValid(trustStore); + logger.info("Certs are valid: {}", hasValidCerts); + if (trustStore == null || !hasValidCerts) { + VaultIssuerCertificateRequestResponse resp = this.vaultPkiOperations.getIssuerCertificate("7022f845-246c-3836-836f-83936e50b888"); + trustStore = resp.getData().createTrustStore(true); + this.persistStore(trustStore); + } + + TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + tmf.init(trustStore); + + this.sslContext = SSLContext.getInstance("TLSv1.2"); + this.sslContext.init(null, tmf.getTrustManagers(), null); + } + + + private KeyStore readInTrustStore() throws KeyStoreException { + if (Files.exists(storePath)) { + KeyStore keyStore = KeyStore.getInstance("PKCS12"); + try (InputStream inputStream = Files.newInputStream(storePath, StandardOpenOption.READ)) { + keyStore.load(inputStream, this.keyStorePassword.toCharArray()); + return keyStore; + } catch (IOException | CertificateException | NoSuchAlgorithmException e) { + return null; + } + } + return null; + } + + private boolean checkIfCertsAreStillValid(KeyStore keyStore) throws KeyStoreException { + if (keyStore == null) return false; + Enumeration aliases = keyStore.aliases(); + while (aliases.hasMoreElements()) { + String alias = aliases.nextElement(); + X509Certificate certificate = (X509Certificate) keyStore.getCertificate(alias); + try { + certificate.checkValidity(); + } catch (CertificateExpiredException | CertificateNotYetValidException e) { + return false; + } + } + return true; + } + + private void persistStore(KeyStore store) throws IOException, KeyStoreException, NoSuchAlgorithmException { + if (!Files.exists(storePath)) { + Files.createDirectories(storePath.getParent()); + Files.createFile(storePath); + } + try (OutputStream outputStream = Files.newOutputStream(storePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) { + logger.debug("Persisting the KeyStore to {}", storePath); + try { + store.store(outputStream, this.keyStorePassword.toCharArray()); + } catch (CertificateException e) { + } + } + } + + @Override + public void init(Properties properties) throws Exception { + } + + @Override + public SSLContext getSSLContext() { + if(this.sslContext == null) { + try { + this.refreshCerts(); + } catch (KeyStoreException | IOException | NoSuchAlgorithmException | KeyManagementException e) { + logger.error(e.getMessage()); + throw new RuntimeException(e); + } + } + return this.sslContext; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java index 21bc5018..36f6e87a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java @@ -14,7 +14,6 @@ import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; -import org.onedatashare.transferservice.odstransferservice.utility.S3Utility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.springframework.batch.core.StepExecution; @@ -48,7 +47,7 @@ public AmazonS3Reader(AccountEndpointCredential sourceCredential, EntityInfo fil @BeforeStep public void beforeStep(StepExecution stepExecution) { this.fileName = this.fileInfo.getId();//For an S3 Reader job this should be the object key - this.amazonS3URI = new AmazonS3URI(S3Utility.constructS3URI(this.sourceCredential.getUri(), this.fileName)); + this.amazonS3URI = new AmazonS3URI(ODSUtility.constructS3URI(this.sourceCredential.getUri(), this.fileName)); this.getSkeleton = new GetObjectRequest(this.amazonS3URI.getBucket(), this.amazonS3URI.getKey()); logger.info("Starting S3 job for file {} with uri {}", this.fileName, this.amazonS3URI); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java deleted file mode 100644 index 38fb5330..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step; - -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; - -public class DynamicStep implements Step { - - @Override - public String getName() { - return null; - } - - @Override - public boolean isAllowStartIfComplete() { - return false; - } - - @Override - public int getStartLimit() { - return 0; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java deleted file mode 100644 index ff8dd22d..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step; - -import org.onedatashare.transferservice.odstransferservice.model.DataChunk; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.item.ItemProcessor; -import org.springframework.stereotype.Component; - -@Component -public class Processor implements ItemProcessor { - - Logger logger = LoggerFactory.getLogger(Processor.class); - - @Override - public DataChunk process(DataChunk dc) throws Exception { -// System.out.println("Processor :"+dc.getData()); - return dc; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java deleted file mode 100644 index c4f429da..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.utility; - -import com.amazonaws.auth.AWSCredentials; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; - -public class S3Utility { - - public static AmazonS3 
constructClient(AccountEndpointCredential credential, String region){ - AWSCredentials credentials = new BasicAWSCredentials(credential.getUsername(), credential.getSecret()); - return AmazonS3ClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider(credentials)) - .withRegion(region) - .build(); - } - - public static String constructS3URI(String uri, String fileKey){ - StringBuilder builder = new StringBuilder(); - String[] temp = uri.split(":::"); - String bucketName = temp[1]; - String region = temp[0]; - builder.append("https://").append(bucketName).append(".").append("s3.").append(region).append(".").append("amazonaws.com/").append(fileKey); - return builder.toString(); - } -} diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 45fa63aa..7a7efd07 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -3,10 +3,8 @@ ods.user=${USER_NAME} server.port=8092 server.shutdown=graceful spring.main.allow-bean-definition-overriding=true - gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} - #Eureka config #eureka.client.enabled=true #eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka @@ -14,18 +12,14 @@ gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} #eureka.client.registry-fetch-interval-seconds=5 #eureka.instance.leaseRenewalIntervalInSeconds=10 #eureka.instance.metadata-map.startup=${random.int} - #SBA management.endpoints.web.exposure.include=* - #Ignore non-null values spring.jackson.default-property-inclusion=NON_NULL spring.batch.job.enabled=false - #pmeter pmeter.report.path=${PMETER_REPORT_PATH:${HOME}/.pmeter/transfer_service_pmeter_measure.txt} pmeter.cron.run=${ENABLE_PMETER:false} -pmeter.interface=${PMETER_NIC_INTERFACE:awdl0} pmeter.measure=${PMETER_MEASURE:1} pmeter.options=${PMETER_CLI_OPTIONS:"-KNS"} pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} @@ -42,3 +36,5 @@ ods.influx.org=${INFLUX_ORG:OneDataShare} transfer.service.concurrency=${MAX_CONCURRENCY:32} transfer.service.parallelism=${MAX_PARALLELISM:32} transfer.service.pipelining=${MAX_PIPELINING:32} + +hz.keystore.password=changeit diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java new file mode 100644 index 00000000..da189f01 --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java @@ -0,0 +1,69 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import org.junit.Assert; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.model.TransferOptions; +import org.springframework.test.util.ReflectionTestUtils; + +import java.util.UUID; + +public class CarbonJobMeasureTest { + + CarbonJobMeasure testObj; + static IMap carbonIntensityMap; + 
static IMap fileTransferScheduleMap;
+    static ObjectMapper objectMapper;
+    @Mock
+    PmeterParser pmeterParser;
+
+    @BeforeAll
+    public static void beforeAllTests() {
+        HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance();
+        carbonIntensityMap = hazelcastInstance.getMap("carbon-intensity-map");
+        fileTransferScheduleMap = hazelcastInstance.getMap("file-transfer-schedule-map");
+        objectMapper = new ObjectMapper();
+    }
+
+    @BeforeEach
+    public void beforeEachTest() {
+        testObj = new CarbonJobMeasure(carbonIntensityMap, fileTransferScheduleMap, pmeterParser, objectMapper);
+        ReflectionTestUtils.setField(testObj, "appName", "odsNode");
+        ReflectionTestUtils.setField(testObj, "odsUser", "odsNode");
+    }
+
+    @Test
+    public void testEmptyMapsDefault() {
+        testObj.measureCarbonOfPotentialJobs();
+        Assert.assertEquals( 0, carbonIntensityMap.size());
+    }
+
+    @Test
+    public void testOneJobInMapForThisNode() throws JsonProcessingException {
+        TransferJobRequest jobRequest = new TransferJobRequest();
+        jobRequest.setJobUuid(UUID.randomUUID());
+        jobRequest.setOptions(new TransferOptions());
+        jobRequest.setSource(new TransferJobRequest.Source());
+        jobRequest.setDestination(new TransferJobRequest.Destination());
+        jobRequest.setOwnerId("jgoldverg@gmail.com");
+        jobRequest.setTransferNodeName("odsNode");
+        String jsonJob = objectMapper.writeValueAsString(jobRequest);
+        fileTransferScheduleMap.put(jobRequest.getJobUuid(), new HazelcastJsonValue(jsonJob));
+        testObj.measureCarbonOfPotentialJobs();
+        Assert.assertEquals(1, carbonIntensityMap.size());
+    }
+
+
+
+
+
+}

From 94be8a9321552fb2a33f37294cf31030694e7b16 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Mon, 28 Oct 2024 08:33:34 -0400
Subject: [PATCH 129/150] removing the unnecessary sslContext

---
 .../service/VaultSSLService.java | 30 +------------------
 1 file changed, 1 insertion(+), 29 deletions(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java
index 281c1f4a..3eecec49 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java
@@ -1,9 +1,7 @@
 package org.onedatashare.transferservice.odstransferservice.service;
 
-import com.hazelcast.nio.ssl.SSLContextFactory;
 import jakarta.annotation.PostConstruct;
 import lombok.Getter;
-import lombok.SneakyThrows;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.core.env.Environment;
@@ -13,8 +11,6 @@ import org.springframework.vault.core.VaultPkiOperations;
 import org.springframework.vault.core.VaultTemplate;
 import org.springframework.vault.support.VaultIssuerCertificateRequestResponse;
 
 import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.TrustManagerFactory;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
@@ -32,12 +28,11 @@ import java.security.cert.X509Certificate;
 import java.time.Duration;
 import java.util.Enumeration;
-import java.util.Properties;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 
 @Service
-public class VaultSSLService implements SSLContextFactory {
+public class VaultSSLService {
 
     private final VaultPkiOperations vaultPkiOperations;
     @Getter
     private final Path storePath;
@@ -83,12 +78,6 @@ public void refreshCerts() throws KeyStoreException, IOException, NoSuchAlgorith
         trustStore = resp.getData().createTrustStore(true);
         this.persistStore(trustStore);
     }
-
-
TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); - tmf.init(trustStore); - - this.sslContext = SSLContext.getInstance("TLSv1.2"); - this.sslContext.init(null, tmf.getTrustManagers(), null); } @@ -133,21 +122,4 @@ private void persistStore(KeyStore store) throws IOException, KeyStoreException, } } } - - @Override - public void init(Properties properties) throws Exception { - } - - @Override - public SSLContext getSSLContext() { - if(this.sslContext == null) { - try { - this.refreshCerts(); - } catch (KeyStoreException | IOException | NoSuchAlgorithmException | KeyManagementException e) { - logger.error(e.getMessage()); - throw new RuntimeException(e); - } - } - return this.sslContext; - } } From 9a317be13b03ae08b941c501fda8293574bbadf5 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 28 Oct 2024 08:34:01 -0400 Subject: [PATCH 130/150] updated a few things out --- .../step/googleDrive/GDriveSimpleWriter.java | 15 ++++++------ .../utility/ODSUtility.java | 23 ++++++++----------- 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java index d9096efc..2b15ad85 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java @@ -6,7 +6,7 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; -import org.onedatashare.transferservice.odstransferservice.model.FileBuffer; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; @@ -32,7 +32,7 @@ public class GDriveSimpleWriter implements ItemWriter { private final OAuthEndpointCredential credential; Drive client; private String basePath; - FileBuffer fileBuffer; + SmallFileUpload smallFileUpload; private String fileName; private String mimeType; private File fileMetaData; @@ -41,7 +41,7 @@ public class GDriveSimpleWriter implements ItemWriter { public GDriveSimpleWriter(OAuthEndpointCredential credential, EntityInfo fileInfo) { this.credential = credential; this.fileInfo = fileInfo; - this.fileBuffer = new FileBuffer(); + this.smallFileUpload = new SmallFileUpload(); } @BeforeStep @@ -60,7 +60,7 @@ public void beforeWrite(List items) { @Override public void write(Chunk items) { - fileBuffer.addAllChunks(items.getItems()); + this.smallFileUpload.addAllChunks(items.getItems()); } @@ -68,7 +68,7 @@ public void write(Chunk items) { public void afterStep() throws Exception { try { logger.debug("Transferring file to the server"); - InputStream inputStream = this.fileBuffer.condenseListToOneStream(this.fileInfo.getSize()); + InputStream inputStream = this.smallFileUpload.condenseListToOneStream(); InputStreamContent inputStreamContent = new InputStreamContent(this.mimeType, inputStream); this.fileMetaData = new File() .setName(this.fileName) @@ -86,8 +86,7 @@ public void afterStep() throws Exception { 
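// For context on the afterStep() changes above: the buffered chunks are condensed
// into one InputStream and handed to a Drive v3 files.create media upload. A minimal
// sketch under assumed values; "example.bin", the mime type and the folder id are
// placeholders, client is an authenticated com.google.api.services.drive.Drive, and
// File here is com.google.api.services.drive.model.File:
InputStreamContent media = new InputStreamContent("application/octet-stream", inputStream);
File metadata = new File().setName("example.bin").setParents(java.util.List.of("hypothetical-folder-id"));
File uploaded = client.files().create(metadata, media).setFields("id, name, size").execute();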
throw e; } this.client = null; - this.fileBuffer.clear(); - this.fileBuffer = null; - + this.smallFileUpload.clearBuffer(); + this.smallFileUpload = null; } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java index 13673822..8f31d52b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java @@ -36,16 +36,6 @@ public class ODSUtility { public static DbxRequestConfig dbxRequestConfig = DbxRequestConfig.newBuilder(odsClientID).build(); - public static DataChunk makeChunk(int size, byte[] data, int startPosition, int chunkIdx, String fileName) { - DataChunk dataChunk = new DataChunk(); - dataChunk.setStartPosition(startPosition); - dataChunk.setChunkIdx(chunkIdx); - dataChunk.setFileName(fileName); - dataChunk.setData(data); - dataChunk.setSize(size); - return dataChunk; - } - public static DataChunk makeChunk(long size, byte[] data, long startPosition, int chunkIdx, String fileName) { DataChunk dataChunk = new DataChunk(); dataChunk.setStartPosition(startPosition); @@ -57,8 +47,6 @@ public static DataChunk makeChunk(long size, byte[] data, long startPosition, in } public static Drive authenticateDriveClient(OAuthEndpointCredential oauthCred) throws GeneralSecurityException, IOException { - System.out.println(gDriveClientId); - System.out.println(gDriveClientSecret); GoogleCredential credential1 = new GoogleCredential.Builder().setJsonFactory(GsonFactory.getDefaultInstance()) .setClientSecrets(gDriveClientId, gDriveClientSecret) .setTransport(GoogleNetHttpTransport.newTrustedTransport()).build(); @@ -123,7 +111,7 @@ public static String uriFromEndpointCredential(EndpointCredential credential, En return uri.getHost(); case s3: ac = (AccountEndpointCredential) credential; - URI s3Uri = URI.create(S3Utility.constructS3URI(ac.getUri(), "")); + URI s3Uri = URI.create(constructS3URI(ac.getUri(), "")); return s3Uri.getHost(); case box: return "box.com"; @@ -135,4 +123,13 @@ public static String uriFromEndpointCredential(EndpointCredential credential, En return ""; } } + + public static String constructS3URI(String uri, String fileKey) { + StringBuilder builder = new StringBuilder(); + String[] temp = uri.split(":::"); + String bucketName = temp[1]; + String region = temp[0]; + builder.append("https://").append(bucketName).append(".").append("s3.").append(region).append(".").append("amazonaws.com/").append(fileKey); + return builder.toString(); + } } From 862a550a58ae6623628479e74dfc88a77dea0e13 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 30 Oct 2024 20:23:38 -0400 Subject: [PATCH 131/150] Fully incorporated hazelcast --- .../odstransferservice/service/VaultSSLService.java | 4 ---- src/main/resources/application.properties | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java index 3eecec49..25818dd4 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java @@ -10,7 +10,6 @@ import org.springframework.vault.core.VaultTemplate; import 
org.springframework.vault.support.VaultIssuerCertificateRequestResponse; -import javax.net.ssl.SSLContext; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; @@ -44,9 +43,6 @@ public class VaultSSLService { @Getter public Duration storeDuration; - SSLContext sslContext; - - public VaultSSLService(Environment environment, VaultTemplate vaultTemplate) { this.vaultPkiOperations = vaultTemplate.opsForPki(); this.storePath = Paths.get(System.getProperty("user.home"), "onedatashare", "ftn", "store", "jobscheduler.truststore.p12"); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 7a7efd07..062fc6a6 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,4 +1,4 @@ -spring.application.name=${APP_NAME:TransferService} +spring.application.name=${APP_NAME:ODSTransferService} ods.user=${USER_NAME} server.port=8092 server.shutdown=graceful From ac8afede6d41d0dbdfff019dfcf1cf33841e8156 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 11 Nov 2024 23:26:07 -0500 Subject: [PATCH 132/150] Hazelcast consumer and carbon monitoring is fully working --- .../config/BatchConfig.java | 2 +- .../config/HazelcastClientConfig.java | 12 +- .../controller/JobMonitor.java | 6 +- .../model/CarbonMeasurement.java | 17 +++ .../service/CarbonJobMeasure.java | 36 +++-- ...sferNodeRegistrationLifeCycleListener.java | 130 ++++++++---------- .../service/HazelcastConsumer.java | 9 +- .../service/InfluxIOService.java | 21 ++- .../service/PmeterParser.java | 6 +- .../service/VaultSSLService.java | 2 +- .../service/step/http/HttpReader.java | 1 - src/main/resources/application.properties | 16 +-- .../service/CarbonJobMeasureTest.java | 7 +- .../service/PmeterParserTest.java | 24 ++++ 14 files changed, 164 insertions(+), 125 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java create mode 100644 src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 436ddf52..1d0d6ecb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -27,7 +27,7 @@ public JobLauncher jobLauncher(JobRepository jobRepository) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(jobRepository); SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); - taskExecutor.setConcurrencyLimit(1); + taskExecutor.setConcurrencyLimit(4); jobLauncher.setTaskExecutor(taskExecutor); return jobLauncher; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java index 45e04a22..1af6c30e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -4,11 +4,11 @@ import com.hazelcast.client.HazelcastClient; import com.hazelcast.client.config.ClientConfig; import com.hazelcast.collection.IQueue; +import 
com.hazelcast.config.IndexType; import com.hazelcast.config.SSLConfig; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.HazelcastJsonValue; import com.hazelcast.map.IMap; -import org.onedatashare.transferservice.odstransferservice.service.FileTransferNodeRegistrationLifeCycleListener; import org.onedatashare.transferservice.odstransferservice.service.VaultSSLService; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; @@ -19,6 +19,7 @@ import org.springframework.vault.core.VaultTemplate; import java.util.Properties; +import java.util.Set; import java.util.UUID; @Configuration @@ -72,16 +73,13 @@ public SSLConfig sslConfig() { SSLConfig sslConfig = new SSLConfig(); sslConfig.setEnabled(true); sslConfig.setProperties(properties); -// sslConfig.setFactoryImplementation(this.vaultSslService); return sslConfig; } @Bean public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { - HazelcastInstance hazelcastInstance = HazelcastClient.newHazelcastClient(clientConfig); - FileTransferNodeRegistrationLifeCycleListener fileTransferNodeRegistrationEventListener = new FileTransferNodeRegistrationLifeCycleListener(hazelcastInstance, env, objectMapper); - hazelcastInstance.getLifecycleService().addLifecycleListener(fileTransferNodeRegistrationEventListener); - return hazelcastInstance; + clientConfig.addLabel(this.env.getProperty("spring.application.name")); + return HazelcastClient.newHazelcastClient(clientConfig); } @Bean @@ -95,7 +93,7 @@ public IMap fileTransferScheduleMap(@Qualifier("hazelc } @Bean - public IMap carbsonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + public IMap carbonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { return hazelcastInstance.getMap("carbon-intensity-map"); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java index 031d58b1..ff0af68e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java @@ -1,6 +1,7 @@ package org.onedatashare.transferservice.odstransferservice.controller; import org.onedatashare.transferservice.odstransferservice.model.BatchJobData; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; @@ -11,6 +12,7 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; +import java.util.List; import java.util.Optional; @@ -19,10 +21,12 @@ public class JobMonitor { private final JobExplorer jobExplorer; + private final JobControl jobControl; Logger logger = LoggerFactory.getLogger(JobMonitor.class); - public JobMonitor(JobExplorer jobExplorer) { + public JobMonitor(JobExplorer jobExplorer, JobControl jobControl) { + this.jobControl = jobControl; this.jobExplorer = jobExplorer; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java new file mode 100644 index 00000000..6853c123 --- /dev/null +++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java @@ -0,0 +1,17 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +import java.time.LocalDateTime; +import java.util.List; +import java.util.UUID; + +@Data +public class CarbonMeasurement { + + List traceRouteCarbon; + String ownerId; + String transferNodeName; + UUID jobUuid; + LocalDateTime timeMeasuredAt; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index 141b2585..ef854f29 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -7,8 +7,9 @@ import com.hazelcast.query.Predicate; import com.hazelcast.query.PredicateBuilder; import com.hazelcast.query.Predicates; -import org.onedatashare.transferservice.odstransferservice.model.CarbonIntensityMapKey; +import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasurement; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; @@ -19,6 +20,7 @@ import java.io.IOException; import java.time.LocalDateTime; +import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.UUID; @@ -27,20 +29,20 @@ @Service public class CarbonJobMeasure { - private final IMap carbonIntensityMap; + private final IMap carbonIntensityMap; private final IMap fileTransferScheduleMap; private final PredicateBuilder.EntryObject entryObj; private final PmeterParser pmeterParser; private final ObjectMapper objectMapper; private final Logger logger = LoggerFactory.getLogger(CarbonJobMeasure.class); - @Value("spring.application.name") + @Value("${spring.application.name}") private String appName; - @Value("ods.user") + @Value("${ods.user}") private String odsUser; - public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { + public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { this.carbonIntensityMap = carbonIntensityMap; this.fileTransferScheduleMap = fileTransferScheduleMap; this.entryObj = Predicates.newPredicateBuilder().getEntryObject(); @@ -66,9 +68,10 @@ public List getPotentialJobsFromMap() { }).collect(Collectors.toList()); } - @Scheduled(cron = "0 0/10 * * * ?") + @Scheduled(cron = "*/10 * * * * *") public void measureCarbonOfPotentialJobs() { List potentialJobs = getPotentialJobsFromMap(); + logger.info("Potential jobs from ODS to run: {}", potentialJobs); potentialJobs.forEach(transferJobRequest -> { try { String sourceIp = ""; @@ -83,10 +86,23 @@ public void measureCarbonOfPotentialJobs() { } else { destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getOauthDestCredential(), transferJobRequest.getDestination().getType()); } - List sourceCarbonPerIp = this.pmeterParser.carbonPerIp(sourceIp); - sourceCarbonPerIp.addAll(this.pmeterParser.carbonPerIp(destIp)); - CarbonIntensityMapKey mapKey 
= new CarbonIntensityMapKey(transferJobRequest.getOwnerId(), transferJobRequest.getTransferNodeName(), transferJobRequest.getJobUuid(), LocalDateTime.now()); - this.carbonIntensityMap.put(new HazelcastJsonValue(this.objectMapper.writeValueAsString(mapKey)), new HazelcastJsonValue(this.objectMapper.writeValueAsString(sourceCarbonPerIp))); + List totalEntries = new ArrayList<>(); + if (!transferJobRequest.getSource().getType().equals(EndpointType.vfs)) { + totalEntries.addAll(this.pmeterParser.carbonPerIp(sourceIp)); + } + if (transferJobRequest.getDestination().getType().equals(EndpointType.vfs)) { + totalEntries.addAll(this.pmeterParser.carbonPerIp(destIp)); + } + CarbonMeasurement carbonMeasurement = new CarbonMeasurement(); + carbonMeasurement.setTimeMeasuredAt(LocalDateTime.now()); + carbonMeasurement.setJobUuid(transferJobRequest.getJobUuid()); + carbonMeasurement.setOwnerId(transferJobRequest.getOwnerId()); + carbonMeasurement.setTransferNodeName(transferJobRequest.getTransferNodeName()); + carbonMeasurement.setTraceRouteCarbon(totalEntries); + HazelcastJsonValue jsonValue = new HazelcastJsonValue(this.objectMapper.writeValueAsString(carbonMeasurement)); + UUID randomUUID = UUID.randomUUID(); + this.carbonIntensityMap.put(randomUUID, jsonValue); + logger.info("Created Carbon entry with Key={} and Value={}", randomUUID, jsonValue.getValue()); } catch (JsonProcessingException e) { logger.error("Failed to parse job: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage()); } catch (IOException e) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java index 69fd4e21..bba0811b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java @@ -1,73 +1,57 @@ -package org.onedatashare.transferservice.odstransferservice.service; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.hazelcast.core.HazelcastInstance; -import com.hazelcast.core.HazelcastJsonValue; -import com.hazelcast.core.LifecycleEvent; -import com.hazelcast.core.LifecycleListener; -import com.hazelcast.map.IMap; -import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.env.Environment; - -import java.util.UUID; - -public class FileTransferNodeRegistrationLifeCycleListener implements LifecycleListener { - - private final HazelcastInstance hazelcastInstance; - private final ObjectMapper objectMapper; - private final IMap fileTransferNodeMap; - private final String appName; - private final String odsOwner; - private final UUID nodeUuid; - Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationService.class); - - public FileTransferNodeRegistrationLifeCycleListener(HazelcastInstance hazelcastInstance, Environment environment, ObjectMapper objectMapper) { - this.hazelcastInstance = hazelcastInstance; - this.appName = environment.getProperty("spring.application.name"); - this.odsOwner = environment.getProperty("ods.user"); - this.objectMapper = objectMapper; - this.nodeUuid = 
hazelcastInstance.getLocalEndpoint().getUuid(); - this.fileTransferNodeMap = hazelcastInstance.getMap("file-transfer-node-map"); - } - - @Override - public void stateChanged(LifecycleEvent event) { - if (event.getState() == LifecycleEvent.LifecycleState.CLIENT_CONNECTED) { - FileTransferNodeMetaData fileTransferNodeMetaData = FileTransferNodeMetaData.builder() - .nodeUuid(this.hazelcastInstance.getLocalEndpoint().getUuid()) - .online(true) - .nodeName(this.appName) - .odsOwner(this.odsOwner) - .nodeUuid(this.nodeUuid) - .runningJob(false) - .jobUuid(new UUID(0, 0)) - .jobId(-1) - .build(); - try { - String json = this.objectMapper.writeValueAsString(fileTransferNodeMetaData); - logger.info("Registering client: {}", fileTransferNodeMetaData); - this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(json)); - } catch (JsonProcessingException e) { - e.printStackTrace(); - } - } - if (event.getState() == LifecycleEvent.LifecycleState.SHUTTING_DOWN) { - try { - String jsonValue = this.fileTransferNodeMap.get(this.nodeUuid).getValue(); - FileTransferNodeMetaData fileTransferNodeMetaData = this.objectMapper.readValue(jsonValue, FileTransferNodeMetaData.class); - fileTransferNodeMetaData.setRunningJob(false); - fileTransferNodeMetaData.setOnline(false); - logger.info("De-Registering client: {}", fileTransferNodeMetaData); - jsonValue = this.objectMapper.writeValueAsString(fileTransferNodeMetaData); - this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(jsonValue)); - } catch (JsonProcessingException e) { - e.printStackTrace(); - } - } - - - } -} +//package org.onedatashare.transferservice.odstransferservice.service; +// +//import com.fasterxml.jackson.core.JsonProcessingException; +//import com.fasterxml.jackson.databind.ObjectMapper; +//import com.hazelcast.core.HazelcastInstance; +//import com.hazelcast.core.HazelcastJsonValue; +//import com.hazelcast.map.IMap; +//import jakarta.annotation.PostConstruct; +//import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; +//import org.slf4j.Logger; +//import org.slf4j.LoggerFactory; +//import org.springframework.core.env.Environment; +//import org.springframework.stereotype.Service; +// +//import java.util.UUID; +// +//@Service +//public class FileTransferNodeRegistrationLifeCycleListener { +// +// private final HazelcastInstance hazelcastInstance; +// private final ObjectMapper objectMapper; +// private final IMap fileTransferNodeMap; +// private final String appName; +// private final String odsOwner; +// private final UUID nodeUuid; +// Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationService.class); +// +// public FileTransferNodeRegistrationLifeCycleListener(HazelcastInstance hazelcastInstance, Environment environment, ObjectMapper objectMapper) { +// this.hazelcastInstance = hazelcastInstance; +// this.appName = environment.getProperty("spring.application.name"); +// this.odsOwner = environment.getProperty("ods.user"); +// this.objectMapper = objectMapper; +// this.nodeUuid = hazelcastInstance.getLocalEndpoint().getUuid(); +// this.fileTransferNodeMap = hazelcastInstance.getMap("file-transfer-node-map"); +// } +// +// @PostConstruct +// public void postConstruct() { +// FileTransferNodeMetaData fileTransferNodeMetaData = FileTransferNodeMetaData.builder() +// .nodeUuid(this.hazelcastInstance.getLocalEndpoint().getUuid()) +// .online(true) +// .nodeName(this.appName) +// .odsOwner(this.odsOwner) +// .nodeUuid(this.nodeUuid) +// .runningJob(false) +// .jobUuid(new UUID(0, 
0)) +// .jobId(-1) +// .build(); +// try { +// String json = this.objectMapper.writeValueAsString(fileTransferNodeMetaData); +// logger.info("Registering client: {}", fileTransferNodeMetaData); +// this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(json)); +// } catch (JsonProcessingException e) { +// e.printStackTrace(); +// } +// } +//} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java index 4a7d185d..8a6818ae 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -45,17 +45,20 @@ public void run() { try { HazelcastJsonValue jsonMsg = this.messageQueue.take(); JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); - String type = ((ObjectNode) jsonNode).get("type").toString(); + logger.info(jsonNode.toPrettyString()); + String type = ((ObjectNode) jsonNode).get("type").asText(); ((ObjectNode) jsonNode).remove("type"); HazelcastJsonValue properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); logger.info("Received message: {}", properJsonMsg); + logger.info(type); switch (MessageType.valueOf(type)) { - case TRANSFER_JOB_REQUEST: + case MessageType.TRANSFER_JOB_REQUEST: this.transferJobRequestHandler.messageHandler(properJsonMsg); break; - case APPLICATION_PARAM_CHANGE: + case MessageType.APPLICATION_PARAM_CHANGE: this.transferParamApplicationHandler.messageHandler(properJsonMsg); + break; } } catch (InterruptedException | JsonProcessingException e) { logger.error(e.getMessage()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java index 1d0adb93..9012c52c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java @@ -19,8 +19,8 @@ public class InfluxIOService { private final InfluxDBClient influxClient; Logger logger = LoggerFactory.getLogger(InfluxIOService.class); - @Value("${ods.user}") - private String odsUserName; + @Value("${ods.influx.bucket}") + private String defaultInfluxBucket; @Value("${ods.influx.org}") String org; @@ -35,21 +35,16 @@ public InfluxIOService(InfluxDBClient influxClient) { @PostConstruct public void postConstruct() { - this.reconfigureBucketForNewJob(odsUserName); + this.reconfigureBucketForNewJob(this.defaultInfluxBucket); } - public void reconfigureBucketForNewJob(String ownerId) { - logger.info("********* Reconfiguring the Bucket to Owner {}***********", ownerId); - if (ownerId == null) { - bucket = influxClient.getBucketsApi().findBucketByName(this.odsUserName); - } else { - bucket = influxClient.getBucketsApi().findBucketByName(ownerId); - } - + public void reconfigureBucketForNewJob(String bucketName) { + logger.info("********* Reconfiguring the Bucket to Owner {}***********", bucketName); + bucket = influxClient.getBucketsApi().findBucketByName(bucketName); if (bucket == null) { - logger.info("Creating the Influx bucket name={}, org={}", ownerId, org); + logger.info("Creating the Influx bucket name={}, org={}", bucketName, org); try { - bucket = this.influxClient.getBucketsApi().createBucket(ownerId, org); + 
bucket = this.influxClient.getBucketsApi().createBucket(bucketName, org); } catch (UnprocessableEntityException ignored) { } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 0fdb231c..746581f4 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -68,6 +68,10 @@ public class PmeterParser { @PostConstruct public void init() throws IOException { this.pmeterNic = this.discoverActiveNetworkInterface(); + if(this.pmeterNic == null || this.pmeterNic.isEmpty()) { + this.pmeterNic = "en0"; + } + logger.info("Interface used for monitoring: {}", this.pmeterNic); this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); } @@ -159,7 +163,7 @@ public List carbonPerIp(String ip) throws IOException { return retList; } - private String discoverActiveNetworkInterface() throws IOException { + public String discoverActiveNetworkInterface() throws IOException { // iterate over the network interfaces known to java Enumeration interfaces = NetworkInterface.getNetworkInterfaces(); for (NetworkInterface interface_ : Collections.list(interfaces)) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java index 25818dd4..8e7f0783 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java @@ -47,7 +47,7 @@ public VaultSSLService(Environment environment, VaultTemplate vaultTemplate) { this.vaultPkiOperations = vaultTemplate.opsForPki(); this.storePath = Paths.get(System.getProperty("user.home"), "onedatashare", "ftn", "store", "jobscheduler.truststore.p12"); this.keyStorePassword = environment.getProperty("hz.keystore.password", "changeit"); - this.storeDuration = Duration.ofMinutes(1); + this.storeDuration = Duration.ofDays(1); this.scheduler = Executors.newScheduledThreadPool(0, Thread.ofVirtual().factory()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java index d8b1828b..6ab66b85 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java @@ -107,7 +107,6 @@ public DataChunk read() throws IOException, InterruptedException { request = rangeMode(uri, filePart, this.range); } HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); - logger.info(Thread.currentThread().toString(), "File Chunk: ", filePart.toString()); return ODSUtility.makeChunk(response.body().length, response.body(), filePart.getStart(), Long.valueOf(filePart.getPartIdx()).intValue(), this.fileName); } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 062fc6a6..e4e0f2cc 100644 --- 
a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,22 +1,20 @@ spring.application.name=${APP_NAME:ODSTransferService} -ods.user=${USER_NAME} +ods.user=${USER_NAME:OneDataShare} server.port=8092 server.shutdown=graceful spring.main.allow-bean-definition-overriding=true gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} -#Eureka config -#eureka.client.enabled=true -#eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka -#eureka.client.healthcheck.enabled=true -#eureka.client.registry-fetch-interval-seconds=5 -#eureka.instance.leaseRenewalIntervalInSeconds=10 -#eureka.instance.metadata-map.startup=${random.int} + #SBA management.endpoints.web.exposure.include=* + +eureka.client.enabled=false + #Ignore non-null values spring.jackson.default-property-inclusion=NON_NULL spring.batch.job.enabled=false + #pmeter pmeter.report.path=${PMETER_REPORT_PATH:${HOME}/.pmeter/transfer_service_pmeter_measure.txt} pmeter.cron.run=${ENABLE_PMETER:false} @@ -29,7 +27,7 @@ pmeter.carbon.toggle=${REPORT_CARBON:false} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} -ods.influx.bucket=${INFLUX_BUCKET:ods_test} +ods.influx.bucket=${INFLUX_BUCKET:ODSTransferNodes} ods.influx.token=${INFLUX_TOKEN} ods.influx.org=${INFLUX_ORG:OneDataShare} diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java index da189f01..379e7a17 100644 --- a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java @@ -20,7 +20,7 @@ public class CarbonJobMeasureTest { CarbonJobMeasure testObj; - static IMap carbonIntensityMap; + static IMap carbonIntensityMap; static IMap fileTransferScheduleMap; static ObjectMapper objectMapper; @Mock @@ -44,7 +44,7 @@ public void beforeEachTest() { @Test public void testEmptyMapsDefault() { testObj.measureCarbonOfPotentialJobs(); - Assert.assertEquals( 0, carbonIntensityMap.size()); + Assert.assertEquals(0, carbonIntensityMap.size()); } @Test @@ -63,7 +63,4 @@ public void testOneJobInMapForThisNode() throws JsonProcessingException { } - - - } diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java new file mode 100644 index 00000000..0613d38d --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java @@ -0,0 +1,24 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import org.junit.Assert; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import java.io.IOException; + +public class PmeterParserTest { + + PmeterParser testObj; + + @Test + public void parsePmeter() { + testObj = new PmeterParser(); + try { + String interfaceToUse = testObj.discoverActiveNetworkInterface(); + Assert.assertEquals("en0", interfaceToUse); + } catch (IOException e) { + throw new RuntimeException(e); + } + + } +} From b63ed87225cfec33e551f5c3576a92aeb9a84dc3 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 11 Nov 2024 23:27:26 -0500 Subject: [PATCH 133/150] Carbon monitoring happens every 10min 
not 10sec --- .../odstransferservice/service/CarbonJobMeasure.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index ef854f29..9cb9bd67 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -68,7 +68,7 @@ public List getPotentialJobsFromMap() { }).collect(Collectors.toList()); } - @Scheduled(cron = "*/10 * * * * *") + @Scheduled(cron = "* */10 * * * *") public void measureCarbonOfPotentialJobs() { List potentialJobs = getPotentialJobsFromMap(); logger.info("Potential jobs from ODS to run: {}", potentialJobs); From 2bd7f1a62e4b2299bea26f5a494e108b04921dd2 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 11 Nov 2024 23:51:21 -0500 Subject: [PATCH 134/150] Forgot that we dont yet have a load balancer infront of the hz cluster yet --- .../odstransferservice/config/HazelcastClientConfig.java | 1 + src/main/resources/application.properties | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java index 1af6c30e..e3cc56a1 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -56,6 +56,7 @@ public ClientConfig prodClientConfig(SSLConfig sslConfig) { ClientConfig clientConfig = new ClientConfig(); clientConfig.setClusterName("prod-scheduler-cluster"); clientConfig.getNetworkConfig().setSSLConfig(sslConfig); + clientConfig.getNetworkConfig().addAddress(env.getProperty("hz.ipaddr", "localhost")); return clientConfig; } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index e4e0f2cc..1e81e1b4 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -35,4 +35,5 @@ transfer.service.concurrency=${MAX_CONCURRENCY:32} transfer.service.parallelism=${MAX_PARALLELISM:32} transfer.service.pipelining=${MAX_PIPELINING:32} -hz.keystore.password=changeit +hz.keystore.password=${HZ_KEYSTORE_PASSWORD:changeit} +hz.ipaddr=${HZ_IP_ADDR:localhost} From 4fd3a6b14155336b469af0327e30a5de036408b8 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 12 Nov 2024 13:03:44 -0500 Subject: [PATCH 135/150] pmeter nic now supports giving defaults --- .../odstransferservice/service/PmeterParser.java | 7 ++++--- src/main/resources/application.properties | 1 + 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 746581f4..a96b60d2 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -37,6 +37,8 @@ public class PmeterParser { private final PumpStreamHandler streamHandler; private final DefaultExecutor pmeterExecutor; private final 
ExecuteWatchdog watchDog; + + @Value("${pmeter.nic}") private String pmeterNic; Logger logger = LoggerFactory.getLogger(PmeterParser.class); @@ -67,9 +69,8 @@ public class PmeterParser { @PostConstruct public void init() throws IOException { - this.pmeterNic = this.discoverActiveNetworkInterface(); - if(this.pmeterNic == null || this.pmeterNic.isEmpty()) { - this.pmeterNic = "en0"; + if(this.pmeterNic == null || !this.pmeterNic.isEmpty()) { + this.pmeterNic = this.discoverActiveNetworkInterface(); } logger.info("Interface used for monitoring: {}", this.pmeterNic); this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 1e81e1b4..730ea16c 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -24,6 +24,7 @@ pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} pmeter.carbon.map=${PMETER_CARBON_MAP:${HOME}/.pmeter/carbon_ip_map.json} pmeter.carbon.toggle=${REPORT_CARBON:false} +pmeter.nic=${PMETER_NIC_INTERFACE} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} From a4615a608f7323d6fc2348a1bd2b3f4a1005fe78 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 12 Nov 2024 13:33:41 -0500 Subject: [PATCH 136/150] changing timing --- .../odstransferservice/service/CarbonJobMeasure.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index 9cb9bd67..27d0ed28 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -68,7 +68,7 @@ public List getPotentialJobsFromMap() { }).collect(Collectors.toList()); } - @Scheduled(cron = "* */10 * * * *") + @Scheduled(cron = "0 * * * * *") public void measureCarbonOfPotentialJobs() { List potentialJobs = getPotentialJobsFromMap(); logger.info("Potential jobs from ODS to run: {}", potentialJobs); From d0b45e71a578c572030090108d6c1b422b22e30d Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 12 Nov 2024 13:36:47 -0500 Subject: [PATCH 137/150] pinning ods nodes correctly --- .../odstransferservice/service/CarbonJobMeasure.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index 27d0ed28..934544f5 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -52,7 +52,7 @@ public CarbonJobMeasure(IMap carbonIntensityMap, IMap< public List getPotentialJobsFromMap() { Predicate potentialJobs; - if (odsUser.equals("onedatashare")) { + if (odsUser.equals("OneDataShare")) { potentialJobs = this.entryObj.get("options.transferNodeName").equal(this.appName).or(this.entryObj.get("options.transferNodeName").equal("")); } else { potentialJobs = 
this.entryObj.get("options.transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName)); From 43d01295ae6d97b16e4f9ae0c182529b823ebea2 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Tue, 12 Nov 2024 13:42:15 -0500 Subject: [PATCH 138/150] including more logging --- .../odstransferservice/service/CarbonJobMeasure.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java index 934544f5..6317b659 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -62,7 +62,8 @@ public List getPotentialJobsFromMap() { return jsonJobs.stream().map(hazelcastJsonValue -> { try { return this.objectMapper.readValue(hazelcastJsonValue.getValue(), TransferJobRequest.class); - } catch (JsonProcessingException ignored) { + } catch (JsonProcessingException e) { + logger.error("Json Processing Exception: {}\n With message: {}", e, e.getMessage()); } return null; }).collect(Collectors.toList()); From 2f77f4bca4b4ba6d1e75f1255c9f71e8bdf5e5ea Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 20 Nov 2024 20:15:23 -0500 Subject: [PATCH 139/150] Made hazelcast consumer fully async --- .../odstransferservice/Enum/MessageType.java | 2 +- .../message/StopJobRequestHandler.java | 46 +++++++++++ .../message/TransferJobRequestHandler.java | 9 ++- .../model/StopJobRequest.java | 12 +++ .../service/HazelcastConsumer.java | 79 +++++++++++-------- .../service/PmeterParser.java | 23 +++--- src/main/resources/application.properties | 2 +- .../service/PmeterParserTest.java | 29 +++++-- 8 files changed, 147 insertions(+), 55 deletions(-) create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java create mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java index 5acb02fe..60f7a005 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java @@ -1,5 +1,5 @@ package org.onedatashare.transferservice.odstransferservice.Enum; public enum MessageType { - CARBON_AVG_REQUEST, TRANSFER_JOB_REQUEST, APPLICATION_PARAM_CHANGE, CARBON_IP_REQUEST + CARBON_AVG_REQUEST, TRANSFER_JOB_REQUEST, APPLICATION_PARAM_CHANGE, STOP_JOB_REQUEST, CARBON_IP_REQUEST } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java new file mode 100644 index 00000000..880fba4b --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java @@ -0,0 +1,46 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import 
org.onedatashare.transferservice.odstransferservice.model.StopJobRequest; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.util.Set; + +@Service +public class StopJobRequestHandler implements MessageHandler { + + private final ObjectMapper objectMapper; + private final JobOperator jobOperator; + private final JobExplorer jobExplorer; + private final Logger logger; + + public StopJobRequestHandler(JobExplorer jobExplorer, JobOperator jobOperator, ObjectMapper objectMapper) { + this.jobOperator = jobOperator; + this.objectMapper = objectMapper; + this.jobExplorer = jobExplorer; + this.logger = LoggerFactory.getLogger(StopJobRequestHandler.class); + } + + @Override + public void messageHandler(HazelcastJsonValue jsonMsg) throws IOException { + StopJobRequest stopJobRequest = this.objectMapper.readValue(jsonMsg.getValue(), StopJobRequest.class); + Set jobExecutionSet = this.jobExplorer.findRunningJobExecutions(stopJobRequest.getJobUuid().toString()); + for (JobExecution jobExecution : jobExecutionSet) { + try { + jobOperator.stop(jobExecution.getId()); + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + logger.error("Was unable to stop job: {} with error message: {}", jobExecution, e.getMessage()); + } + } + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java index 57069502..dbbdcba0 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -20,7 +20,6 @@ public class TransferJobRequestHandler implements MessageHandler { private final JobControl jobControl; private final ExpanderFactory expanderFactory; - Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobControl jobControl, ExpanderFactory expanderFactory) { @@ -32,7 +31,13 @@ public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobControl jo @Override public void messageHandler(HazelcastJsonValue jsonMessage) throws JsonProcessingException { String jsonStr = jsonMessage.getValue(); - TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); + TransferJobRequest request = null; + try { + request = objectMapper.readValue(jsonStr, TransferJobRequest.class); + } catch (JsonProcessingException e) { + logger.error("Failed to parse Transfer Job Request: {}", jsonStr); + return; + } logger.info("Job Received: {}", request.toString()); List fileInfo = expanderFactory.getExpander(request.getSource()); request.getSource().setInfoList(fileInfo); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java new 
file mode 100644 index 00000000..b6f18c16 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java @@ -0,0 +1,12 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +import java.util.UUID; + +@Data +public class StopJobRequest { + UUID jobUuid; + Integer jobId; + String ownerId; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java index 8a6818ae..3bb0c38d 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -6,64 +6,79 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.hazelcast.collection.IQueue; import com.hazelcast.core.HazelcastJsonValue; -import jakarta.annotation.PostConstruct; import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; +import org.onedatashare.transferservice.odstransferservice.message.StopJobRequestHandler; import org.onedatashare.transferservice.odstransferservice.message.TransferApplicationParamHandler; import org.onedatashare.transferservice.odstransferservice.message.TransferJobRequestHandler; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.core.task.TaskExecutor; +import org.springframework.scheduling.annotation.Scheduled; import org.springframework.stereotype.Service; +import java.io.IOException; + @Service -public class HazelcastConsumer implements Runnable { +public class HazelcastConsumer { private final IQueue messageQueue; private final ObjectMapper objectMapper; private final TransferJobRequestHandler transferJobRequestHandler; private final TransferApplicationParamHandler transferParamApplicationHandler; private final Logger logger; - private Thread consumerThread; + private final StopJobRequestHandler stopJobRequestHandler; + private final TaskExecutor executor; - public HazelcastConsumer(IQueue messageQueue, ObjectMapper objectMapper, TransferJobRequestHandler transferJobRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler) { + public HazelcastConsumer(ThreadPoolContract threadPoolContract, StopJobRequestHandler stopJobRequestHandler, IQueue messageQueue, ObjectMapper objectMapper, TransferJobRequestHandler transferJobRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler) { this.messageQueue = messageQueue; this.transferJobRequestHandler = transferJobRequestHandler; this.objectMapper = objectMapper; this.transferParamApplicationHandler = transferApplicationParamHandler; this.logger = LoggerFactory.getLogger(HazelcastConsumer.class); - this.consumerThread = new Thread(this); - } - - @PostConstruct - public void init() { - this.consumerThread.start(); + this.stopJobRequestHandler = stopJobRequestHandler; + this.executor = threadPoolContract.createExecutor(4, "HazelcastConsumer"); } - @Override - public void run() { - while (true) { - try { - HazelcastJsonValue jsonMsg = this.messageQueue.take(); - JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); - logger.info(jsonNode.toPrettyString()); - String type = ((ObjectNode) jsonNode).get("type").asText(); - ((ObjectNode) jsonNode).remove("type"); - HazelcastJsonValue 
properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); - logger.info("Received message: {}", properJsonMsg); - logger.info(type); - switch (MessageType.valueOf(type)) { - case MessageType.TRANSFER_JOB_REQUEST: + @Scheduled(cron = "0/5 * * * * *", fixedDelay = 0) + public void runConsumer() throws InterruptedException, JsonProcessingException { + HazelcastJsonValue jsonMsg = this.messageQueue.poll(); + if(jsonMsg == null) return; + JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); + logger.info(jsonNode.toPrettyString()); + String type = ((ObjectNode) jsonNode).get("type").asText(); + ((ObjectNode) jsonNode).remove("type"); + HazelcastJsonValue properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); + logger.info("Received message: {}", properJsonMsg); + logger.info(type); + this.executor.execute(() -> { + switch (MessageType.valueOf(type)) { + case MessageType.TRANSFER_JOB_REQUEST: + try { this.transferJobRequestHandler.messageHandler(properJsonMsg); - break; + } catch (JsonProcessingException e) { + logger.error("Failed to parse json in TransferJobReqeust Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; - case MessageType.APPLICATION_PARAM_CHANGE: + case MessageType.APPLICATION_PARAM_CHANGE: + try { this.transferParamApplicationHandler.messageHandler(properJsonMsg); - break; - } - } catch (InterruptedException | JsonProcessingException e) { - logger.error(e.getMessage()); - } + } catch (JsonProcessingException e) { + logger.error("Failed to parse json in TransferParam Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; - } + case MessageType.STOP_JOB_REQUEST: + try { + this.stopJobRequestHandler.messageHandler(properJsonMsg); + } catch (IOException e) { + logger.error("Failed to parse json in Stop Job Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; + } + }); } + } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index a96b60d2..229c20a5 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -15,6 +15,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.env.Environment; import org.springframework.stereotype.Service; import java.io.ByteArrayOutputStream; @@ -38,8 +39,7 @@ public class PmeterParser { private final DefaultExecutor pmeterExecutor; private final ExecuteWatchdog watchDog; - @Value("${pmeter.nic}") - private String pmeterNic; + public String pmeterNic; Logger logger = LoggerFactory.getLogger(PmeterParser.class); @@ -67,16 +67,8 @@ public class PmeterParser { ObjectMapper pmeterMapper; private CommandLine cmdLine; - @PostConstruct - public void init() throws IOException { - if(this.pmeterNic == null || !this.pmeterNic.isEmpty()) { - this.pmeterNic = this.discoverActiveNetworkInterface(); - } - logger.info("Interface used for monitoring: {}", this.pmeterNic); - this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); - } - public PmeterParser() { + public PmeterParser(Environment environment) { this.outputStream = new 
ByteArrayOutputStream(); this.streamHandler = new PumpStreamHandler(outputStream); @@ -88,7 +80,16 @@ public PmeterParser() { this.pmeterMapper = new ObjectMapper(); this.pmeterMapper.registerModule(new JavaTimeModule()); this.pmeterMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); + this.pmeterNic = environment.getProperty("pmeter.nic", ""); + } + @PostConstruct + public void init() throws IOException { + if(this.pmeterNic.isEmpty()) { + this.pmeterNic = this.discoverActiveNetworkInterface(); + } + logger.info("Interface used for monitoring: {}", this.pmeterNic); + this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); } diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 730ea16c..eda7c8c2 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -24,7 +24,7 @@ pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} pmeter.carbon.map=${PMETER_CARBON_MAP:${HOME}/.pmeter/carbon_ip_map.json} pmeter.carbon.toggle=${REPORT_CARBON:false} -pmeter.nic=${PMETER_NIC_INTERFACE} +pmeter.nic=${PMETER_NIC_INTERFACE:""} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java index 0613d38d..46ffa9f7 100644 --- a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java @@ -3,22 +3,35 @@ import org.junit.Assert; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.core.env.Environment; import java.io.IOException; + public class PmeterParserTest { PmeterParser testObj; + @MockBean + Environment environment; + @Test - public void parsePmeter() { - testObj = new PmeterParser(); - try { - String interfaceToUse = testObj.discoverActiveNetworkInterface(); - Assert.assertEquals("en0", interfaceToUse); - } catch (IOException e) { - throw new RuntimeException(e); - } + public void testPmeterNicDefaultEmpty() throws IOException { + this.environment = Mockito.mock(Environment.class); + Mockito.when(environment.getProperty("pmeter.nic", "")).thenReturn(""); + testObj = new PmeterParser(this.environment); + testObj.init(); + Assert.assertEquals("en0",this.testObj.pmeterNic); + } + @Test + public void testPmeterNicGivenValue() throws IOException { + this.environment = Mockito.mock(Environment.class); + Mockito.when(environment.getProperty("pmeter.nic", "")).thenReturn("en0"); + testObj = new PmeterParser(this.environment); + testObj.init(); + Assert.assertEquals("en0",this.testObj.pmeterNic); } } From cb5c1136b4654306ab22313630ae0b15d0494417 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Wed, 20 Nov 2024 20:49:26 -0500 Subject: [PATCH 140/150] forgot to remove fixedDelay --- .../odstransferservice/service/HazelcastConsumer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java index 3bb0c38d..229ddac1 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -41,7 +41,7 @@ public HazelcastConsumer(ThreadPoolContract threadPoolContract, StopJobRequestHa } - @Scheduled(cron = "0/5 * * * * *", fixedDelay = 0) + @Scheduled(cron = "0/5 * * * * *") public void runConsumer() throws InterruptedException, JsonProcessingException { HazelcastJsonValue jsonMsg = this.messageQueue.poll(); if(jsonMsg == null) return; From 7fa6be051241062b523ac3a637edc7ddddf10bb2 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 25 Nov 2024 14:34:28 -0500 Subject: [PATCH 141/150] Making hz consumer have unlimited threads --- .../odstransferservice/service/HazelcastConsumer.java | 10 ++++------ .../service/expanders/ExpanderFactory.java | 5 ++--- .../service/expanders/HttpExpander.java | 3 --- 3 files changed, 6 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java index 229ddac1..f5e70119 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -37,21 +37,19 @@ public HazelcastConsumer(ThreadPoolContract threadPoolContract, StopJobRequestHa this.transferParamApplicationHandler = transferApplicationParamHandler; this.logger = LoggerFactory.getLogger(HazelcastConsumer.class); this.stopJobRequestHandler = stopJobRequestHandler; - this.executor = threadPoolContract.createExecutor(4, "HazelcastConsumer"); + this.executor = threadPoolContract.createExecutor(-1, "HazelcastConsumer"); } @Scheduled(cron = "0/5 * * * * *") - public void runConsumer() throws InterruptedException, JsonProcessingException { + public void runConsumer() throws JsonProcessingException { HazelcastJsonValue jsonMsg = this.messageQueue.poll(); - if(jsonMsg == null) return; + if (jsonMsg == null) return; JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); - logger.info(jsonNode.toPrettyString()); + logger.info("Got Msg: {}", jsonNode.toPrettyString()); String type = ((ObjectNode) jsonNode).get("type").asText(); ((ObjectNode) jsonNode).remove("type"); HazelcastJsonValue properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); - logger.info("Received message: {}", properJsonMsg); - logger.info(type); this.executor.execute(() -> { switch (MessageType.valueOf(type)) { case MessageType.TRANSFER_JOB_REQUEST: diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java index 2098ef7c..489a1827 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java @@ -1,6 +1,5 @@ package org.onedatashare.transferservice.odstransferservice.service.expanders; -import 
org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.springframework.stereotype.Service; @@ -10,8 +9,8 @@ @Service public class ExpanderFactory { - public List getExpander(TransferJobRequest.Source source){ - switch (source.getType()){ + public List getExpander(TransferJobRequest.Source source) { + switch (source.getType()) { case vfs -> { VfsExpander vfsExpander = new VfsExpander(); vfsExpander.createClient(source.getVfsSourceCredential()); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java index 11dcaf83..183c74f8 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java @@ -76,7 +76,6 @@ public List expandedFileSystem(List userSelectedResource if (directory.text().contains("..") || directory.text().contains(".")) { continue; } - logger.info("File: {}", directory.baseUri() + directory.text()); Document doc = Jsoup.connect(directory.baseUri() + basePath +directory.text()).get(); Elements links = doc.select("body a"); for (Element elem : links) { @@ -98,7 +97,6 @@ public EntityInfo fromElement(Element elem, Integer chunkSize) throws IOExceptio URL url = new URL(elem.absUrl("href")); long fileSize = url.openConnection().getContentLengthLong(); Path path = Path.of(url.getPath()); - logger.info("File path: {}, name: {}, size: {},", path, elem.text(), fileSize); fileInfo.setId(elem.text()); fileInfo.setSize(fileSize); fileInfo.setPath(path.toAbsolutePath().toString()); @@ -115,7 +113,6 @@ public EntityInfo fileToInfo(String strUrl, Integer chunkSize) throws IOExceptio URLConnection conn = url.openConnection(); long fileSize = conn.getContentLengthLong(); String fileName = Paths.get(conn.getURL().getFile()).getFileName().toString(); - logger.info("File path: {}, name: {}, size: {},", url.getPath(), fileName, fileSize); fileInfo.setId(fileName); fileInfo.setSize(fileSize); fileInfo.setPath(url.getPath()); From f7a738f587d7fd12254c5e29f6ec17acf2d253a7 Mon Sep 17 00:00:00 2001 From: Jacob Goldverg Date: Mon, 25 Nov 2024 14:39:08 -0500 Subject: [PATCH 142/150] Should have made pools fully scaleable --- .../odstransferservice/pools/ThreadPoolManagerPlatform.java | 6 ++++-- .../odstransferservice/pools/ThreadPoolManagerVirtual.java | 4 +++- .../odstransferservice/service/HazelcastConsumer.java | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java index 18ff73ca..f5fbe593 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -24,8 +24,10 @@ public ThreadPoolManagerPlatform() { @Override public ThreadPoolTaskExecutor createExecutor(int threadCount, String prefix) { ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); - executor.setAllowCoreThreadTimeOut(false); - 
-        executor.setCorePoolSize(threadCount);
+        executor.setAllowCoreThreadTimeOut(true);
+        if(threadCount > 0) {
+            executor.setCorePoolSize(threadCount);
+        }
         executor.setPrestartAllCoreThreads(true);
         executor.setThreadNamePrefix(prefix);
         executor.initialize();
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java
index ac7c6094..af125590 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java
@@ -29,7 +29,9 @@ public SimpleAsyncTaskExecutor createExecutor(int threadCount, String prefix) {
         SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor();
         executor.setThreadNamePrefix(prefix);
         executor.setVirtualThreads(true);
-        executor.setConcurrencyLimit(threadCount);
+        if(threadCount > 0){
+            executor.setConcurrencyLimit(threadCount);
+        }
         if (this.executorHashmap == null) {
             this.executorHashmap = new HashMap<>();
         }
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java
index f5e70119..a858aca9 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java
@@ -37,7 +37,7 @@ public HazelcastConsumer(ThreadPoolContract threadPoolContract, StopJobRequestHa
         this.transferParamApplicationHandler = transferApplicationParamHandler;
         this.logger = LoggerFactory.getLogger(HazelcastConsumer.class);
         this.stopJobRequestHandler = stopJobRequestHandler;
-        this.executor = threadPoolContract.createExecutor(-1, "HazelcastConsumer");
+        this.executor = threadPoolContract.createExecutor(10, "HazelcastConsumer");
     }
 
 

From bcf183fce8a36b36e70ff3435efd6f43c90d74c8 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 26 Nov 2024 22:46:43 -0500
Subject: [PATCH 143/150] use any available node connected to ODS

---
 .../odstransferservice/service/CarbonJobMeasure.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 6317b659..260c3811 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -53,7 +53,7 @@ public CarbonJobMeasure(IMap carbonIntensityMap, IMap<
     public List getPotentialJobsFromMap() {
         Predicate potentialJobs;
         if (odsUser.equals("OneDataShare")) {
-            potentialJobs = this.entryObj.get("options.transferNodeName").equal(this.appName).or(this.entryObj.get("options.transferNodeName").equal(""));
+            potentialJobs = this.entryObj.get("options.transferNodeName").equal("");
         } else {
             potentialJobs = this.entryObj.get("options.transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName));
         }

From 986ab6039ec265638bce3088490dfe7a86672edf Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 26 Nov 2024 23:17:54 -0500
Subject: [PATCH 144/150] Fixed querying jobs for connectors and ODS-based nodes

---
 .../service/CarbonJobMeasure.java | 20 ++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 260c3811..12c4af79 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -7,6 +7,7 @@
 import com.hazelcast.query.Predicate;
 import com.hazelcast.query.PredicateBuilder;
 import com.hazelcast.query.Predicates;
+import jakarta.annotation.PostConstruct;
 import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType;
 import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry;
 import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasurement;
@@ -35,6 +36,7 @@ public class CarbonJobMeasure {
     private final PmeterParser pmeterParser;
     private final ObjectMapper objectMapper;
     private final Logger logger = LoggerFactory.getLogger(CarbonJobMeasure.class);
+    private boolean odsConnector;
 
     @Value("${spring.application.name}")
     private String appName;
@@ -48,14 +50,26 @@ public CarbonJobMeasure(IMap carbonIntensityMap, IMap<
         this.entryObj = Predicates.newPredicateBuilder().getEntryObject();
         this.pmeterParser = pmeterParser;
         this.objectMapper = objectMapper;
+        this.odsConnector = false;
+    }
+
+    @PostConstruct
+    public void init() {
+        //set ODS Connector
+        if(this.odsUser.equals("OneDataShare") || this.appName.equals("ODSTransferService")) {
+            this.odsConnector = true;
+        }
+
     }
 
     public List getPotentialJobsFromMap() {
         Predicate potentialJobs;
-        if (odsUser.equals("OneDataShare")) {
-            potentialJobs = this.entryObj.get("options.transferNodeName").equal("");
+        if (this.odsConnector) {
+            logger.info("{} Querying Hazelcast for jobs", this.appName);
+            potentialJobs = this.entryObj.get("transferNodeName").equal("");
         } else {
-            potentialJobs = this.entryObj.get("options.transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName));
+            logger.info("ODS Connector: {} Querying Hazelcast for jobs", this.appName);
+            potentialJobs = this.entryObj.get("transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName));
         }
 
         Collection jsonJobs = this.fileTransferScheduleMap.values(potentialJobs);

From 544258a5aa1abd1974772d5c8f60edeb3cbcd805 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 26 Nov 2024 23:26:06 -0500
Subject: [PATCH 145/150] adjust

---
 .../odstransferservice/service/CarbonJobMeasure.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 12c4af79..341224fb 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -121,7 +121,7 @@ public void measureCarbonOfPotentialJobs() {
             } catch (JsonProcessingException e) {
                 logger.error("Failed to parse job: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage());
             } catch (IOException e) {
-                logger.error("Failed to measure ip: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage());
+                logger.error("Failed to measure ip: {} \n Error received: \t {}", transferJobRequest.toString(), e);
             }
         });
     }

From 07222c02e4c46bfa221e549cc72f77490dddf7da Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 26 Nov 2024 23:37:50 -0500
Subject: [PATCH 146/150] logging

---
 .../odstransferservice/service/CarbonJobMeasure.java | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 341224fb..565f722e 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -102,6 +102,8 @@ public void measureCarbonOfPotentialJobs() {
                 destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getOauthDestCredential(), transferJobRequest.getDestination().getType());
             }
             List totalEntries = new ArrayList<>();
+            logger.info("Source IP: {}", sourceIp);
+            logger.info("Destination IP: {}", destIp);
             if (!transferJobRequest.getSource().getType().equals(EndpointType.vfs)) {
                 totalEntries.addAll(this.pmeterParser.carbonPerIp(sourceIp));
             }

From a8a2664c5759918fce1b6e5e42a3775dd070797c Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Tue, 26 Nov 2024 23:41:45 -0500
Subject: [PATCH 147/150] removing extra logging statements

---
 .../odstransferservice/service/CarbonJobMeasure.java | 2 --
 1 file changed, 2 deletions(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 565f722e..341224fb 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -102,8 +102,6 @@ public void measureCarbonOfPotentialJobs() {
                 destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getOauthDestCredential(), transferJobRequest.getDestination().getType());
             }
             List totalEntries = new ArrayList<>();
-            logger.info("Source IP: {}", sourceIp);
-            logger.info("Destination IP: {}", destIp);
             if (!transferJobRequest.getSource().getType().equals(EndpointType.vfs)) {
                 totalEntries.addAll(this.pmeterParser.carbonPerIp(sourceIp));
             }

From 345b332da865c68d13197655e6b4cae03084c85a Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Mon, 2 Dec 2024 16:52:12 -0500
Subject: [PATCH 148/150] ensured proper discovery of NIC in a much quicker way

---
 ...sferNodeRegistrationLifeCycleListener.java | 57 -----------------
 .../service/PmeterParser.java | 62 +++++--------------
 src/main/resources/application.properties | 2 +-
 3 files changed, 16 insertions(+), 105 deletions(-)
 delete mode 100644 src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java
deleted file mode 100644
index bba0811b..00000000
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationLifeCycleListener.java
+++ /dev/null
@@ -1,57 +0,0 @@
-//package org.onedatashare.transferservice.odstransferservice.service;
-//
-//import com.fasterxml.jackson.core.JsonProcessingException;
-//import com.fasterxml.jackson.databind.ObjectMapper;
-//import com.hazelcast.core.HazelcastInstance;
-//import com.hazelcast.core.HazelcastJsonValue;
-//import com.hazelcast.map.IMap;
-//import jakarta.annotation.PostConstruct;
-//import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData;
-//import org.slf4j.Logger;
-//import org.slf4j.LoggerFactory;
-//import org.springframework.core.env.Environment;
-//import org.springframework.stereotype.Service;
-//
-//import java.util.UUID;
-//
-//@Service
-//public class FileTransferNodeRegistrationLifeCycleListener {
-//
-//    private final HazelcastInstance hazelcastInstance;
-//    private final ObjectMapper objectMapper;
-//    private final IMap fileTransferNodeMap;
-//    private final String appName;
-//    private final String odsOwner;
-//    private final UUID nodeUuid;
-//    Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationService.class);
-//
-//    public FileTransferNodeRegistrationLifeCycleListener(HazelcastInstance hazelcastInstance, Environment environment, ObjectMapper objectMapper) {
-//        this.hazelcastInstance = hazelcastInstance;
-//        this.appName = environment.getProperty("spring.application.name");
-//        this.odsOwner = environment.getProperty("ods.user");
-//        this.objectMapper = objectMapper;
-//        this.nodeUuid = hazelcastInstance.getLocalEndpoint().getUuid();
-//        this.fileTransferNodeMap = hazelcastInstance.getMap("file-transfer-node-map");
-//    }
-//
-//    @PostConstruct
-//    public void postConstruct() {
-//        FileTransferNodeMetaData fileTransferNodeMetaData = FileTransferNodeMetaData.builder()
-//                .nodeUuid(this.hazelcastInstance.getLocalEndpoint().getUuid())
-//                .online(true)
-//                .nodeName(this.appName)
-//                .odsOwner(this.odsOwner)
-//                .nodeUuid(this.nodeUuid)
-//                .runningJob(false)
-//                .jobUuid(new UUID(0, 0))
-//                .jobId(-1)
-//                .build();
-//        try {
-//            String json = this.objectMapper.writeValueAsString(fileTransferNodeMetaData);
-//            logger.info("Registering client: {}", fileTransferNodeMetaData);
-//            this.fileTransferNodeMap.put(this.appName, new HazelcastJsonValue(json));
-//        } catch (JsonProcessingException e) {
-//            e.printStackTrace();
-//        }
-//    }
-//}
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java
index 229c20a5..564f7c3d 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java
@@ -5,6 +5,7 @@
 import com.fasterxml.jackson.databind.SerializationFeature;
 import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
 import jakarta.annotation.PostConstruct;
+import lombok.SneakyThrows;
 import org.apache.commons.exec.CommandLine;
 import org.apache.commons.exec.DefaultExecutor;
 import org.apache.commons.exec.ExecuteWatchdog;
@@ -20,11 +21,9 @@
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.net.Inet6Address;
 import java.net.InetAddress;
-import java.net.InetSocketAddress;
 import java.net.NetworkInterface;
-import java.nio.channels.SocketChannel;
+import java.net.Socket;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -39,6 +38,7 @@ public class PmeterParser {
 
     private final DefaultExecutor pmeterExecutor;
     private final ExecuteWatchdog watchDog;
+    @Value("${pmeter.nic}")
     public String pmeterNic;
 
     Logger logger = LoggerFactory.getLogger(PmeterParser.class);
@@ -80,12 +80,13 @@ public PmeterParser(Environment environment) {
         this.pmeterMapper = new ObjectMapper();
         this.pmeterMapper.registerModule(new JavaTimeModule());
         this.pmeterMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false);
-        this.pmeterNic = environment.getProperty("pmeter.nic", "");
+
+
     }
 
     @PostConstruct
     public void init() throws IOException {
-        if(this.pmeterNic.isEmpty()) {
+        if (this.pmeterNic == null || this.pmeterNic.isEmpty()) {
             this.pmeterNic = this.discoverActiveNetworkInterface();
         }
         logger.info("Interface used for monitoring: {}", this.pmeterNic);
@@ -165,49 +166,16 @@ public List carbonPerIp(String ip) throws IOException {
         return retList;
     }
 
-    public String discoverActiveNetworkInterface() throws IOException {
+    @SneakyThrows
+    public String discoverActiveNetworkInterface() {
         // iterate over the network interfaces known to java
-        Enumeration interfaces = NetworkInterface.getNetworkInterfaces();
-        for (NetworkInterface interface_ : Collections.list(interfaces)) {
-            // we shouldn't care about loopback addresses
-            if (interface_.isLoopback())
-                continue;
-
-            // if you don't expect the interface to be up you can skip this
-            // though it would question the usability of the rest of the code
-            if (!interface_.isUp())
-                continue;
-
-            // iterate over the addresses associated with the interface
-            Enumeration addresses = interface_.getInetAddresses();
-            for (InetAddress address : Collections.list(addresses)) {
-                // look only for ipv4 addresses
-                logger.info(address.getHostAddress());
-                if (address instanceof Inet6Address)
-                    continue;
-
-                // use a timeout big enough for your needs
-                if (!address.isReachable(3000))
-                    continue;
-
-                // java 7's try-with-resources statement, so that
-                // we close the socket immediately after use
-                try (SocketChannel socket = SocketChannel.open()) {
-                    // again, use a big enough timeout
-                    socket.socket().setSoTimeout(3000);
-
-                    // bind the socket to your local interface
-                    socket.bind(new InetSocketAddress(address, 8080));
-
-                    // try to connect to *somewhere*
-                    socket.connect(new InetSocketAddress("onedatashare.org", 80));
-                } catch (IOException ex) {
-                    continue;
-                }
-                logger.info("Interface used for Transfer-Service: {}", interface_.getDisplayName());
-                return interface_.getDisplayName();
-            }
+        try (Socket socket = new Socket("google.com", 80)) {
+            InetAddress localAddress = socket.getLocalAddress();
+            logger.info("Local Address: {}", localAddress.getHostAddress());
+
+            // Get the network interface for the local address
+            NetworkInterface networkInterface = NetworkInterface.getByInetAddress(localAddress);
+            return networkInterface.getName();
+        }
-        return "";
     }
 }
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index eda7c8c2..bdee6f0c 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -24,7 +24,7 @@ pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *}
 pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt}
 pmeter.carbon.map=${PMETER_CARBON_MAP:${HOME}/.pmeter/carbon_ip_map.json}
 pmeter.carbon.toggle=${REPORT_CARBON:false}
-pmeter.nic=${PMETER_NIC_INTERFACE:""}
+pmeter.nic=${PMETER_NIC_INTERFACE:#{null}}
 
 #influx
 ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org}

From 49602070d73f535d04b9bbbef1bb7c7076b4deeb Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Mon, 2 Dec 2024 18:31:32 -0500
Subject: [PATCH 149/150] ODS Connectors can compete for any job a user submits

---
 .../odstransferservice/service/CarbonJobMeasure.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
index 341224fb..9d90f5ca 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java
@@ -69,7 +69,7 @@ public List getPotentialJobsFromMap() {
             potentialJobs = this.entryObj.get("transferNodeName").equal("");
         } else {
             logger.info("ODS Connector: {} Querying Hazelcast for jobs", this.appName);
-            potentialJobs = this.entryObj.get("transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName));
+            potentialJobs = this.entryObj.get("transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName)).or(this.entryObj.get("ownerId").equal(this.odsUser));
         }

From 2a2106bc07d0e040d40642609a5b82740cf11724 Mon Sep 17 00:00:00 2001
From: Jacob Goldverg
Date: Thu, 5 Dec 2024 13:28:21 -0500
Subject: [PATCH 150/150] included latest job execution for direct CLI monitoring

---
 .../odstransferservice/controller/JobMonitor.java | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
index ff0af68e..15554efd 100644
--- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
+++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java
@@ -43,4 +43,11 @@ public ResponseEntity getJobExecution(@RequestParam("jobId") Optio
             return ResponseEntity.ok(BatchJobData.builder().build());
         }
     }
+
+    @GetMapping("/latest")
+    public ResponseEntity getLatestJobExecution() {
+        JobExecution jobExecution = this.jobControl.getLatestJobExecution();
+        if (jobExecution == null) { return ResponseEntity.ok(null); }
+        return ResponseEntity.ok(BatchJobData.convertFromJobExecution(jobExecution));
+    }
 }
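
Note on PATCH 141/142: createExecutor() now treats a non-positive threadCount as "unbounded". A minimal standalone sketch of that convention using Spring's SimpleAsyncTaskExecutor (virtual threads require Java 21+ and Spring Framework 6.1+; the class name and the -1 sentinel mirror the patches, everything else here is illustrative):

    import org.springframework.core.task.SimpleAsyncTaskExecutor;

    public class ScalablePoolSketch {
        // threadCount <= 0 means no concurrency limit: every submitted task
        // gets its own virtual thread instead of queueing behind a fixed pool.
        static SimpleAsyncTaskExecutor createExecutor(int threadCount, String prefix) {
            SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor();
            executor.setThreadNamePrefix(prefix);
            executor.setVirtualThreads(true);
            if (threadCount > 0) {
                executor.setConcurrencyLimit(threadCount);
            }
            return executor;
        }

        public static void main(String[] args) {
            createExecutor(-1, "sketch-").execute(() -> System.out.println(Thread.currentThread()));
        }
    }

The guard also explains PATCH 142's change to the platform pool: SimpleAsyncTaskExecutor accepts -1 as "no throttle", but passing -1 through to ThreadPoolTaskExecutor.setCorePoolSize() would fail at initialization, so both implementations now skip the limit entirely for non-positive counts.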
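
Note on PATCH 143/144/149: job discovery is a Hazelcast predicate query over the shared schedule map. A sketch of the final query shape (the field names mirror the patches; the map name, appName, and odsUser values are placeholders, and a reachable Hazelcast cluster is assumed):

    import com.hazelcast.core.Hazelcast;
    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.core.HazelcastJsonValue;
    import com.hazelcast.map.IMap;
    import com.hazelcast.query.Predicate;
    import com.hazelcast.query.PredicateBuilder.EntryObject;
    import com.hazelcast.query.Predicates;

    public class JobQuerySketch {
        public static void main(String[] args) {
            HazelcastInstance hz = Hazelcast.newHazelcastInstance();
            IMap<String, HazelcastJsonValue> scheduleMap = hz.getMap("file-transfer-schedule-map"); // placeholder map name

            String appName = "my-connector"; // placeholder node name
            String odsUser = "someUser";     // placeholder owner
            EntryObject entry = Predicates.newPredicateBuilder().getEntryObject();
            // A connector competes for jobs pinned to it by node name, by either
            // credential id, or (since PATCH 149) any job owned by its user.
            Predicate potentialJobs = entry.get("transferNodeName").equal(appName)
                    .or(entry.get("source.credId").equal(appName))
                    .or(entry.get("destination.credId").equal(appName))
                    .or(entry.get("ownerId").equal(odsUser));

            System.out.println(scheduleMap.values(potentialJobs));
        }
    }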
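
Note on PATCH 148: the rewritten discoverActiveNetworkInterface() delegates interface selection to the OS routing table instead of probing every interface with reachability checks. A minimal standalone sketch of the same technique (google.com:80 is just an assumed reachable endpoint, as in the patch):

    import java.io.IOException;
    import java.net.InetAddress;
    import java.net.NetworkInterface;
    import java.net.Socket;

    public class NicDiscoverySketch {
        // Open one outbound TCP connection; the local address the OS binds
        // identifies the interface traffic is actually routed through.
        static String discoverActiveNic() throws IOException {
            try (Socket socket = new Socket("google.com", 80)) {
                InetAddress localAddress = socket.getLocalAddress();
                NetworkInterface nic = NetworkInterface.getByInetAddress(localAddress);
                return nic.getName();
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println("Active NIC: " + discoverActiveNic());
        }
    }

A single connection replaces the old per-address isReachable(3000) probes, which is where the speedup in the commit message comes from.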