apicurio-migration-tool/src/main/java/cz/trask/migration/impl/v32/Wso2v32ToApicurioFromDir.java
Radek Davidek 0d25f854ec some fixes
2025-12-22 21:00:36 +01:00

204 lines
7.2 KiB
Java
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

package cz.trask.migration.impl.v32;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.fasterxml.jackson.databind.node.ObjectNode;
import cz.trask.migration.AbstractProcess;
import cz.trask.migration.model.APIInfo;
import cz.trask.migration.model.FileType;
import cz.trask.migration.model.ZipEntryData;
import cz.trask.migration.model.v32.ApiDefinition32;
import io.apicurio.registry.rest.client.exception.VersionAlreadyExistsException;
import io.apicurio.registry.rest.v2.beans.ArtifactMetaData;
import io.apicurio.registry.rest.v2.beans.ArtifactReference;
import io.apicurio.registry.rest.v2.beans.VersionSearchResults;
import io.apicurio.registry.types.RuleType;
import lombok.extern.log4j.Log4j2;
@Log4j2
public class Wso2v32ToApicurioFromDir extends AbstractProcess {
private final AtomicInteger apiCounter = new AtomicInteger(1);
/**
* Main entry point for the import process.
*
* @throws RuntimeException if any error occurs
*/
public void process() {
try {
log.info("Starting API import to Apicurio from WSO2 Directory...");
File[] apiFiles = new File(config.getSource().getWso2ApisDir())
.listFiles((dir, name) -> name.endsWith(".zip"));
if (apiFiles == null || apiFiles.length == 0) {
log.warn("No API zip files found in directory: {}", config.getSource().getWso2ApisDir());
return;
}
log.info("Found {} APIs", apiFiles.length);
int maxThreads = config.getMaxThreads();
ExecutorService executor = Executors.newFixedThreadPool(maxThreads);
for (File api : apiFiles) {
final int index = apiCounter.getAndIncrement();
executor.submit(() -> processApi(api, index, apiFiles.length));
}
executor.shutdown();
if (!executor.awaitTermination(10, TimeUnit.MINUTES)) {
log.warn("Timeout waiting for API import tasks to finish");
}
log.info("Finished processing APIs.");
} catch (Exception e) {
log.error("Error while exporting APIs.", e);
throw new RuntimeException("Export failed", e);
}
}
/**
* Process a single API fetches the data, creates or updates the corresponding
* artifact in Apicurio.
*/
private void processApi(File apiFile, int index, int total) {
long start = System.currentTimeMillis();
try {
log.info("Processing API {} of {}", index, total);
List<ZipEntryData> zipEntries = ZipUtils.extractFilesFromZip(Files.readAllBytes(apiFile.toPath()));
String swagger = null;
String apiDefStr = null;
for (ZipEntryData e : zipEntries) {
if (swagger != null && apiDefStr != null) {
break;
}
if (e.getType().toString().equals(FileType.OPENAPI.toString())) {
log.debug("Found main API swagger file: {}", e.getName());
swagger = new String(e.getContent());
} else if (e.getType().toString().equals(FileType.APIDEF.toString())) {
log.debug("Found main API definition file: {}", e.getName());
apiDefStr = new String(e.getContent());
}
}
ApiDefinition32 apiDef = mapper.readValue(apiDefStr, ApiDefinition32.class);
Map<String, Object> swaggerMap = mapper.readValue(swagger, Map.class);
APIInfo api = new APIInfo();
api.setName(apiDef.getId().getApiName());
api.setVersion(apiDef.getId().getVersion());
api.setContext(apiDef.getContext().substring(0, apiDef.getContext().indexOf('/', 1)));
api.setDescription((String) ((Map<String, Object>) swaggerMap.get("info")).get("description"));
api.setType(apiDef.getType());
api.setId(apiDef.getUuid());
System.out.println("Context: " + apiDef.getContext());
@SuppressWarnings("unchecked")
List<String> tagsList = (List<String>) apiDef.getTags();
Map<String, String> props = new LinkedHashMap<>();
props.put("version", api.getVersion());
props.put("status", apiDef.getStatus());
props.put(PARAM_SOURCE_APIM, VERSION_32);
// addSubscriptionsToProps(props, subs);
// addEndpointsToProps(props, apiMap);
addTagsToProps(props, tagsList);
String baseDesc = api.getDescription() != null ? api.getDescription() : "";
String pubUrl = config.getPatterns().getPublisherUrlPattern().replace("{API_ID}",
api.getId());
String devPortUrl = config.getPatterns().getDevPortalUrlPattern().replace("{API_ID}",
api.getId());
String fullDesc = baseDesc + " ***** PUBLISHER URL ***** " + pubUrl + " ***** DEVPORTAL URL ***** "
+ devPortUrl;
ObjectNode swaggerObj = mapperYaml.valueToTree(swaggerMap);
String group = config.getApicurio().getDefaultApiGroup();
String mainArtifactId = api.getName() + api.getContext();
VersionSearchResults existingArtifacts;
try {
existingArtifacts = client.listArtifactVersions(group, mainArtifactId, 0,
Integer.MAX_VALUE);
} catch (Exception e) {
log.debug("No API {} exists will create it", api.getContext());
existingArtifacts = null;
}
if (existingArtifacts == null) {
// Create new artifact
List<ArtifactReference> references = createReferencesFromZip(zipEntries,
api);
// addSubscriptionsToReferences(references, subs, api);
ArtifactMetaData meta = client.createArtifact(group, mainArtifactId,
api.getVersion(), null, null, null,
api.getName(), fullDesc, null, null, null,
new ByteArrayInputStream(swaggerObj.toString().getBytes()), references);
setArtifactMetaData(meta, api.getName(), fullDesc, props);
// Create the three required rules
createRule(meta, "NONE", RuleType.COMPATIBILITY);
createRule(meta, "NONE", RuleType.VALIDITY);
createRule(meta, "NONE", RuleType.INTEGRITY);
} else {
// Artifact exists check if the version exists
boolean versionExists = false;
try {
client.getArtifactVersionMetaData(group, mainArtifactId, api.getVersion());
versionExists = true;
} catch (Exception e) {
// Version missing will create it below
}
List<ArtifactReference> references = createReferencesFromZip(zipEntries,
api);
// addSubscriptionsToReferences(references, subs, api);
if (!versionExists) {
ArtifactMetaData meta = client.updateArtifact(group, mainArtifactId,
api.getVersion(),
api.getName(), fullDesc, new ByteArrayInputStream(swaggerObj.toString().getBytes()),
references);
setArtifactMetaData(meta, api.getName(), fullDesc, props);
} else {
// Version already exists no action needed
log.warn("API {} with version {} already exists. Skipping import.",
api.getContext(),
api.getVersion());
}
}
log.info("Successfully imported API '{}' ({}). Took {} ms", api.getName(),
api.getVersion(),
System.currentTimeMillis() - start);
} catch (IOException e) {
log.error("IO error while importing API file {}: {}", apiFile.getName(), e.getMessage(), e);
} catch (VersionAlreadyExistsException e) {
log.warn("API version already exists for file: {}. Skipping.", apiFile.getName(), e.getMessage(), e);
} catch (Exception e) {
log.error("Cannot export API '{}': {}", apiFile.getName(), e.getMessage(), e);
}
}
}