diff --git a/src/uk/org/floop/jenkins_pmd/Pipelines.groovy b/src/uk/org/floop/jenkins_pmd/Pipelines.groovy index 5062e17..385566d 100644 --- a/src/uk/org/floop/jenkins_pmd/Pipelines.groovy +++ b/src/uk/org/floop/jenkins_pmd/Pipelines.groovy @@ -104,6 +104,18 @@ body.addTextBody('codelist-name', codelistName) execAndWait(path, body.build()) } + + def components(String draftsetId, String csvFilename) { + String path = "/v1/pipelines/ons-table2qb.core/components/import" + MultipartEntityBuilder body = createDrafterBody(draftsetId) + body.addBinaryBody( + 'components-csv', + new FilePath(new File(csvFilename)).read(), + ContentType.create('text/csv', 'UTF-8'), + csvFilename + ) + execAndWait(path, body.build()) + } } @InheritConstructors diff --git a/vars/runPipeline.groovy b/vars/runPipeline.groovy deleted file mode 100644 index 5d0080f..0000000 --- a/vars/runPipeline.groovy +++ /dev/null @@ -1,34 +0,0 @@ -def call(String pipelineUrl, String draftsetId, String credentials, params) { - withCredentials([usernameColonPassword(credentialsId: credentials, variable: 'USERPASS')]) { - String boundary = UUID.randomUUID().toString() - def allParams = [ - [name: '__endpoint-type', value: 'grafter-server.destination/draftset-update'], - [name: '__endpoint', value: groovy.json.JsonOutput.toJson([ - url: "http://localhost:3001/v1/draftset/${draftsetId}/data", - headers: [Authorization: "Basic ${USERPASS.bytes.encodeBase64()}"] - ])]] + params - String body = "" - allParams.each { param -> - body += "--${boundary}\r\n" - body += 'Content-Disposition: form-data; name="' + param.name + '"' - if (param.containsKey('file')) { - body += '; filename="' + param.file.name + '"\r\nContent-Type: "' + param.file.type + '\r\n\r\n' - body += readFile(param.file.name) + '\r\n' - } else { - body += "\r\n\r\n${param.value}\r\n" - } - } - body += "--${boundary}--\r\n" - def importRequest = httpRequest(acceptType: 'APPLICATION_JSON', authentication: credentials, - httpMode: 'POST', url: 
pipelineUrl, requestBody: body, - customHeaders: [[name: 'Content-Type', - value: 'multipart/form-data;charset=UTF-8;boundary="' + boundary + '"']]) - if (importRequest.status == 202) { - def importJob = readJSON(text: importRequest.content) - String jobUrl = new java.net.URI(pipelineUrl).resolve(importJob['finished-job']) as String - drafter.waitForJob(jobUrl, credentials, importJob['restart-id'] as String) - } else { - error "Failed import, ${importRequest.status} : ${importRequest.content}" - } - } -} diff --git a/vars/uploadComponents.groovy b/vars/uploadComponents.groovy index eaa8621..82b8677 100644 --- a/vars/uploadComponents.groovy +++ b/vars/uploadComponents.groovy @@ -1,15 +1,7 @@ +import uk.org.floop.jenkins_pmd.PMD + def call(String csv) { - configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) { - def config = readJSON(text: readFile(file: configfile)) - String PMD = config['pmd_api'] - String credentials = config['credentials'] - String PIPELINE = config['pipeline_api'] - String baseURI = config['base_uri'] - - def draft = jobDraft.find() - - runPipeline("${PIPELINE}/ons-table2qb.core/components/import", - draft.id, credentials, [[name: 'components-csv', - file: [name: csv, type: 'text/csv']]]) - } -} + PMD pmd = pmdConfig('pmd') + def draft = jobDraft.find() + pmd.pipelines.components(draft.id as String, csv) +} \ No newline at end of file diff --git a/vars/uploadCube.groovy b/vars/uploadCube.groovy deleted file mode 100644 index b7a87b3..0000000 --- a/vars/uploadCube.groovy +++ /dev/null @@ -1,24 +0,0 @@ -def call(String datasetLabel, obslist) { - configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) { - def config = readJSON(text: readFile(file: configfile)) - String PMD = config['pmd_api'] - String credentials = config['credentials'] - String PIPELINE = config['pipeline_api'] - def jobDraft = drafter.findDraftset(PMD, credentials, env.JOB_NAME) - if (jobDraft) { - drafter.deleteDraftset(PMD, 
credentials, jobDraft.id as String) - } - def newJobDraft = drafter.createDraftset(PMD, credentials, env.JOB_NAME as String) - String datasetPath = util.slugise(datasetLabel) - dataset.delete(datasetLabel) - - drafter.addData(PMD, credentials, newJobDraft.id as String, - readFile("out/dataset.trig"), "application/trig;charset=UTF-8") - obslist.each { obsfile -> - echo "Uploading ${obsfile}" - drafter.addData(PMD, credentials, newJobDraft.id as String, - readFile(obsfile), "text/turtle;charset=UTF-8", - "http://gss-data.org.uk/graph/${datasetPath}") - } - } -} diff --git a/vars/uploadDraftset.groovy b/vars/uploadDraftset.groovy deleted file mode 100644 index 3d044f7..0000000 --- a/vars/uploadDraftset.groovy +++ /dev/null @@ -1,44 +0,0 @@ -def call(String datasetLabel, csvs, String mapping=null, String datasetPath=null) { - configFileProvider([configFile(fileId: 'pmdConfig', variable: 'configfile')]) { - def config = readJSON(text: readFile(file: configfile)) - String PMD = config['pmd_api'] - String credentials = config['credentials'] - String PIPELINE = config['pipeline_api'] - if (!mapping) { - if (fileExists('metadata/columns.csv')) { - mapping = 'metadata/columns.csv' - } else { - mapping = config['default_mapping'] - } - } - if (mapping.startsWith('http')) { - def response = httpRequest( - httpMode: 'GET', - url: mapping) - dir ('metadata') { - writeFile file: 'columns.csv', text: response.content - } - mapping = 'metadata/columns.csv' - } - jobDraft.replace() - def draft = jobDraft.find() - if (!datasetPath) { - datasetPath = util.slugise(datasetLabel) - } - dataset.delete(datasetPath) - drafter.addData(PMD, credentials, draft.id, - readFile("out/dataset.trig"), "application/trig;charset=UTF-8") - - csvs.each { csv -> - echo "Uploading ${csv}" - runPipeline("${PIPELINE}/ons-table2qb.core/data-cube/import", - draft.id, credentials, [ - [name: 'observations-csv', - file: [name: csv, type: 'text/csv;charset=UTF-8']], - [name: 'dataset-name', value: datasetLabel], 
- [name: 'dataset-slug', value: datasetPath], - [name: 'columns-csv', file: [name: mapping, type: 'text/csv;charset=UTF-8']] - ]) - } - } -}