@@ -13,16 +13,23 @@ Export-Package: org.eclipse.jgit.lfs.server;version="4.3.0"; | |||
javax.servlet.http, | |||
org.eclipse.jgit.lfs.server, | |||
org.eclipse.jgit.lfs.lib", | |||
org.eclipse.jgit.lfs.server.internal;version="4.3.0";x-internal:=true | |||
org.eclipse.jgit.lfs.server.internal;version="4.3.0";x-internal:=true, | |||
org.eclipse.jgit.lfs.server.s3;version="4.3.0"; | |||
uses:="org.eclipse.jgit.lfs.server, | |||
org.eclipse.jgit.lfs.lib" | |||
Bundle-RequiredExecutionEnvironment: JavaSE-1.7 | |||
Import-Package: com.google.gson;version="[2.2.4,3.0.0)", | |||
javax.servlet;version="[3.1.0,4.0.0)", | |||
javax.servlet.annotation;version="[3.1.0,4.0.0)", | |||
javax.servlet.http;version="[3.1.0,4.0.0)", | |||
org.apache.http;version="[4.3.0,5.0.0)", | |||
org.apache.http.client;version="[4.3.0,5.0.0)", | |||
org.eclipse.jgit.annotations;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.internal;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.internal.storage.file;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lfs.errors;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lfs.lib;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.nls;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.transport.http;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.transport.http.apache;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.util;version="[4.3.0,4.4.0)" |
@@ -77,6 +77,12 @@ | |||
<version>${project.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>org.eclipse.jgit</groupId> | |||
<artifactId>org.eclipse.jgit.http.apache</artifactId> | |||
<version>${project.version}</version> | |||
</dependency> | |||
<dependency> | |||
<groupId>javax.servlet</groupId> | |||
<artifactId>javax.servlet-api</artifactId> |
@@ -1,4 +1,12 @@ | |||
corruptLongObject=The content hash ''{0}'' of the long object ''{1}'' doesn''t match its id, the corrupt object will be deleted. | |||
failedToCalcSignature=Failed to calculate a request signature: {0} | |||
invalidPathInfo=Invalid pathInfo ''{0}'' does not match ''/'{'SHA-256'}''' | |||
objectNotFound=Object ''{0}'' not found | |||
undefinedS3AccessKey=S3 configuration: ''accessKey'' is undefined | |||
undefinedS3Bucket=S3 configuration: ''bucket'' is undefined | |||
undefinedS3Region=S3 configuration: ''region'' is undefined | |||
undefinedS3SecretKey=S3 configuration: ''secretKey'' is undefined | |||
undefinedS3StorageClass=S3 configuration: ''storageClass'' is undefined | |||
unparsableEndpoint=Unable to parse service endpoint: {0} | |||
unsupportedOperation=Operation ''{0}'' is not supported | |||
unsupportedUtf8=UTF-8 encoding is not supported. |
@@ -59,7 +59,15 @@ public class LfsServerText extends TranslationBundle { | |||
// @formatter:off | |||
/***/ public String corruptLongObject; | |||
/***/ public String failedToCalcSignature; | |||
/***/ public String invalidPathInfo; | |||
/***/ public String objectNotFound; | |||
/***/ public String undefinedS3AccessKey; | |||
/***/ public String undefinedS3Bucket; | |||
/***/ public String undefinedS3Region; | |||
/***/ public String undefinedS3SecretKey; | |||
/***/ public String undefinedS3StorageClass; | |||
/***/ public String unparsableEndpoint; | |||
/***/ public String unsupportedOperation; | |||
/***/ public String unsupportedUtf8; | |||
} |
@@ -0,0 +1,141 @@ | |||
/* | |||
* Copyright (C) 2015, Matthias Sohn <matthias.sohn@sap.com> | |||
* Copyright (C) 2015, Sasa Zivkov <sasa.zivkov@sap.com> | |||
* and other copyright owners as documented in the project's IP log. | |||
* | |||
* This program and the accompanying materials are made available | |||
* under the terms of the Eclipse Distribution License v1.0 which | |||
* accompanies this distribution, is reproduced below, and is | |||
* available at http://www.eclipse.org/org/documents/edl-v10.php | |||
* | |||
* All rights reserved. | |||
* | |||
* Redistribution and use in source and binary forms, with or | |||
* without modification, are permitted provided that the following | |||
* conditions are met: | |||
* | |||
* - Redistributions of source code must retain the above copyright | |||
* notice, this list of conditions and the following disclaimer. | |||
* | |||
* - Redistributions in binary form must reproduce the above | |||
* copyright notice, this list of conditions and the following | |||
* disclaimer in the documentation and/or other materials provided | |||
* with the distribution. | |||
* | |||
* - Neither the name of the Eclipse Foundation, Inc. nor the | |||
* names of its contributors may be used to endorse or promote | |||
* products derived from this software without specific prior | |||
* written permission. | |||
* | |||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND | |||
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, | |||
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES | |||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | |||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR | |||
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT | |||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | |||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, | |||
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||
*/ | |||
package org.eclipse.jgit.lfs.server.s3; | |||
/** | |||
* Configuration for an Amazon AWS S3 bucket | |||
* | |||
* @since 4.3 | |||
*/ | |||
public class S3Config {
	// All fields are immutable; instances are safe to share across threads.
	private final String region;

	private final String bucket;

	private final String storageClass;

	private final String accessKey;

	private final String secretKey;

	private final int expirationSeconds;

	private final boolean disableSslVerify;

	/**
	 * Create an immutable S3 bucket configuration.
	 *
	 * @param region
	 *            AWS region
	 * @param bucket
	 *            S3 storage bucket
	 * @param storageClass
	 *            S3 storage class
	 * @param accessKey
	 *            access key for authenticating to AWS
	 * @param secretKey
	 *            secret key for authenticating to AWS
	 * @param expirationSeconds
	 *            period in seconds after which requests signed for this bucket
	 *            will expire
	 * @param disableSslVerify
	 *            if {@code true} disable Amazon server certificate and hostname
	 *            verification
	 */
	public S3Config(String region, String bucket, String storageClass,
			String accessKey, String secretKey, int expirationSeconds,
			boolean disableSslVerify) {
		this.region = region;
		this.bucket = bucket;
		this.storageClass = storageClass;
		this.accessKey = accessKey;
		this.secretKey = secretKey;
		this.expirationSeconds = expirationSeconds;
		this.disableSslVerify = disableSslVerify;
	}

	/**
	 * @return name of the AWS region this bucket resides in
	 */
	public String getRegion() {
		return region;
	}

	/**
	 * @return name of the S3 storage bucket
	 */
	public String getBucket() {
		return bucket;
	}

	/**
	 * @return S3 storage class used for objects stored in this bucket
	 */
	public String getStorageClass() {
		return storageClass;
	}

	/**
	 * @return access key for authenticating to AWS
	 */
	public String getAccessKey() {
		return accessKey;
	}

	/**
	 * @return secret key for authenticating to AWS
	 */
	public String getSecretKey() {
		return secretKey;
	}

	/**
	 * @return period in seconds after which requests signed for this bucket
	 *         will expire
	 */
	public int getExpirationSeconds() {
		return expirationSeconds;
	}

	/**
	 * @return {@code true} if Amazon server certificate and hostname
	 *         verification is disabled
	 */
	boolean isDisableSslVerify() {
		return disableSslVerify;
	}
}
@@ -0,0 +1,217 @@ | |||
/* | |||
* Copyright (C) 2015, Matthias Sohn <matthias.sohn@sap.com> | |||
* Copyright (C) 2015, Sasa Zivkov <sasa.zivkov@sap.com> | |||
* and other copyright owners as documented in the project's IP log. | |||
* | |||
* This program and the accompanying materials are made available | |||
* under the terms of the Eclipse Distribution License v1.0 which | |||
* accompanies this distribution, is reproduced below, and is | |||
* available at http://www.eclipse.org/org/documents/edl-v10.php | |||
* | |||
* All rights reserved. | |||
* | |||
* Redistribution and use in source and binary forms, with or | |||
* without modification, are permitted provided that the following | |||
* conditions are met: | |||
* | |||
* - Redistributions of source code must retain the above copyright | |||
* notice, this list of conditions and the following disclaimer. | |||
* | |||
* - Redistributions in binary form must reproduce the above | |||
* copyright notice, this list of conditions and the following | |||
* disclaimer in the documentation and/or other materials provided | |||
* with the distribution. | |||
* | |||
* - Neither the name of the Eclipse Foundation, Inc. nor the | |||
* names of its contributors may be used to endorse or promote | |||
* products derived from this software without specific prior | |||
* written permission. | |||
* | |||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND | |||
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, | |||
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES | |||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | |||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR | |||
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT | |||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | |||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, | |||
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||
*/ | |||
package org.eclipse.jgit.lfs.server.s3; | |||
import static javax.servlet.http.HttpServletResponse.SC_OK; | |||
import static org.eclipse.jgit.lfs.server.s3.SignerV4.UNSIGNED_PAYLOAD; | |||
import static org.eclipse.jgit.lfs.server.s3.SignerV4.X_AMZ_CONTENT_SHA256; | |||
import static org.eclipse.jgit.lfs.server.s3.SignerV4.X_AMZ_EXPIRES; | |||
import static org.eclipse.jgit.lfs.server.s3.SignerV4.X_AMZ_STORAGE_CLASS; | |||
import static org.eclipse.jgit.util.HttpSupport.HDR_CONTENT_LENGTH; | |||
import static org.eclipse.jgit.util.HttpSupport.METHOD_GET; | |||
import static org.eclipse.jgit.util.HttpSupport.METHOD_HEAD; | |||
import static org.eclipse.jgit.util.HttpSupport.METHOD_PUT; | |||
import java.io.IOException; | |||
import java.net.MalformedURLException; | |||
import java.net.Proxy; | |||
import java.net.ProxySelector; | |||
import java.net.URL; | |||
import java.text.MessageFormat; | |||
import java.util.HashMap; | |||
import java.util.Map; | |||
import org.eclipse.jgit.lfs.lib.AnyLongObjectId; | |||
import org.eclipse.jgit.lfs.server.LargeFileRepository; | |||
import org.eclipse.jgit.lfs.server.Response; | |||
import org.eclipse.jgit.lfs.server.Response.Action; | |||
import org.eclipse.jgit.lfs.server.internal.LfsServerText; | |||
import org.eclipse.jgit.transport.http.HttpConnection; | |||
import org.eclipse.jgit.transport.http.apache.HttpClientConnectionFactory; | |||
import org.eclipse.jgit.util.HttpSupport; | |||
/** | |||
* Repository storing LFS objects in Amazon S3 | |||
* | |||
* @since 4.3 | |||
*/ | |||
public class S3Repository implements LargeFileRepository { | |||
private S3Config s3Config; | |||
/** | |||
* Construct a LFS repository storing large objects in Amazon S3 | |||
* | |||
* @param config | |||
* AWS S3 storage bucket configuration | |||
*/ | |||
public S3Repository(S3Config config) { | |||
validateConfig(config); | |||
this.s3Config = config; | |||
} | |||
@Override | |||
public Response.Action getDownloadAction(AnyLongObjectId oid) { | |||
URL endpointUrl = getObjectUrl(oid); | |||
Map<String, String> queryParams = new HashMap<String, String>(); | |||
queryParams.put(X_AMZ_EXPIRES, | |||
Integer.toString(s3Config.getExpirationSeconds())); | |||
Map<String, String> headers = new HashMap<String, String>(); | |||
String authorizationQueryParameters = SignerV4.createAuthorizationQuery( | |||
s3Config, endpointUrl, METHOD_GET, headers, queryParams, | |||
UNSIGNED_PAYLOAD); | |||
Response.Action a = new Response.Action(); | |||
a.href = endpointUrl.toString() + "?" + authorizationQueryParameters; //$NON-NLS-1$ | |||
return a; | |||
} | |||
@Override | |||
public Response.Action getUploadAction(AnyLongObjectId oid, long size) { | |||
cacheObjectMetaData(oid, size); | |||
URL objectUrl = getObjectUrl(oid); | |||
Map<String, String> headers = new HashMap<String, String>(); | |||
headers.put(X_AMZ_CONTENT_SHA256, oid.getName()); | |||
headers.put(HDR_CONTENT_LENGTH, Long.toString(size)); | |||
headers.put(X_AMZ_STORAGE_CLASS, s3Config.getStorageClass()); | |||
headers.put(HttpSupport.HDR_CONTENT_TYPE, "application/octet-stream"); //$NON-NLS-1$ | |||
headers = SignerV4.createHeaderAuthorization(s3Config, objectUrl, | |||
METHOD_PUT, headers, oid.getName()); | |||
Response.Action a = new Response.Action(); | |||
a.href = objectUrl.toString(); | |||
a.header = new HashMap<>(); | |||
a.header.putAll(headers); | |||
return a; | |||
} | |||
@Override | |||
public Action getVerifyAction(AnyLongObjectId id) { | |||
return null; // TODO(ms) implement this | |||
} | |||
@Override | |||
public long getSize(AnyLongObjectId oid) throws IOException { | |||
URL endpointUrl = getObjectUrl(oid); | |||
Map<String, String> queryParams = new HashMap<String, String>(); | |||
queryParams.put(X_AMZ_EXPIRES, | |||
Integer.toString(s3Config.getExpirationSeconds())); | |||
Map<String, String> headers = new HashMap<String, String>(); | |||
String authorizationQueryParameters = SignerV4.createAuthorizationQuery( | |||
s3Config, endpointUrl, METHOD_HEAD, headers, queryParams, | |||
UNSIGNED_PAYLOAD); | |||
String href = endpointUrl.toString() + "?" //$NON-NLS-1$ | |||
+ authorizationQueryParameters; | |||
Proxy proxy = HttpSupport.proxyFor(ProxySelector.getDefault(), | |||
endpointUrl); | |||
HttpClientConnectionFactory f = new HttpClientConnectionFactory(); | |||
HttpConnection conn = f.create(new URL(href), proxy); | |||
if (s3Config.isDisableSslVerify()) { | |||
HttpSupport.disableSslVerify(conn); | |||
} | |||
conn.setRequestMethod(METHOD_HEAD); | |||
conn.connect(); | |||
int status = conn.getResponseCode(); | |||
if (status == SC_OK) { | |||
String contentLengthHeader = conn | |||
.getHeaderField(HDR_CONTENT_LENGTH); | |||
if (contentLengthHeader != null) { | |||
return Integer.parseInt(contentLengthHeader); | |||
} | |||
} | |||
return -1; | |||
} | |||
/** | |||
* Cache metadata (size) for an object to avoid extra roundtrip to S3 in | |||
* order to retrieve this metadata for a given object. Subclasses can | |||
* implement a local cache and override {{@link #getSize(AnyLongObjectId)} | |||
* to retrieve the object size from the local cache to eliminate the need | |||
* for another roundtrip to S3 | |||
* | |||
* @param oid | |||
* the object id identifying the object to be cached | |||
* @param size | |||
* the object's size (in bytes) | |||
*/ | |||
protected void cacheObjectMetaData(AnyLongObjectId oid, long size) { | |||
// no caching | |||
} | |||
private void validateConfig(S3Config config) { | |||
assertNotEmpty(LfsServerText.get().undefinedS3AccessKey, | |||
config.getAccessKey()); | |||
assertNotEmpty(LfsServerText.get().undefinedS3Bucket, | |||
config.getBucket()); | |||
assertNotEmpty(LfsServerText.get().undefinedS3Region, | |||
config.getRegion()); | |||
assertNotEmpty(LfsServerText.get().undefinedS3SecretKey, | |||
config.getSecretKey()); | |||
assertNotEmpty(LfsServerText.get().undefinedS3StorageClass, | |||
config.getStorageClass()); | |||
} | |||
private void assertNotEmpty(String message, String value) { | |||
if (value == null || value.trim().length() == 0) { | |||
throw new IllegalArgumentException(message); | |||
} | |||
} | |||
private URL getObjectUrl(AnyLongObjectId oid) { | |||
try { | |||
return new URL(String.format("https://s3-%s.amazonaws.com/%s/%s", //$NON-NLS-1$ | |||
s3Config.getRegion(), s3Config.getBucket(), | |||
getPath(oid))); | |||
} catch (MalformedURLException e) { | |||
throw new IllegalArgumentException(MessageFormat.format( | |||
LfsServerText.get().unparsableEndpoint, e.getMessage())); | |||
} | |||
} | |||
private String getPath(AnyLongObjectId oid) { | |||
return oid.getName(); | |||
} | |||
} |
@@ -0,0 +1,420 @@ | |||
/* | |||
* Copyright (C) 2015, Matthias Sohn <matthias.sohn@sap.com> | |||
* Copyright (C) 2015, Sasa Zivkov <sasa.zivkov@sap.com> | |||
* and other copyright owners as documented in the project's IP log. | |||
* | |||
* This program and the accompanying materials are made available | |||
* under the terms of the Eclipse Distribution License v1.0 which | |||
* accompanies this distribution, is reproduced below, and is | |||
* available at http://www.eclipse.org/org/documents/edl-v10.php | |||
* | |||
* All rights reserved. | |||
* | |||
* Redistribution and use in source and binary forms, with or | |||
* without modification, are permitted provided that the following | |||
* conditions are met: | |||
* | |||
* - Redistributions of source code must retain the above copyright | |||
* notice, this list of conditions and the following disclaimer. | |||
* | |||
* - Redistributions in binary form must reproduce the above | |||
* copyright notice, this list of conditions and the following | |||
* disclaimer in the documentation and/or other materials provided | |||
* with the distribution. | |||
* | |||
* - Neither the name of the Eclipse Foundation, Inc. nor the | |||
* names of its contributors may be used to endorse or promote | |||
* products derived from this software without specific prior | |||
* written permission. | |||
* | |||
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND | |||
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, | |||
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES | |||
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE | |||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR | |||
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | |||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT | |||
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; | |||
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER | |||
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, | |||
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |||
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF | |||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | |||
*/ | |||
package org.eclipse.jgit.lfs.server.s3; | |||
import static org.eclipse.jgit.util.HttpSupport.HDR_AUTHORIZATION; | |||
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.text.MessageFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.SimpleTimeZone;
import java.util.SortedMap;
import java.util.TreeMap;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.eclipse.jgit.lfs.lib.Constants; | |||
import org.eclipse.jgit.lfs.server.internal.LfsServerText; | |||
/** | |||
* Signing support for Amazon AWS signing V4 | |||
* <p> | |||
* See | |||
* http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-authenticating-requests.html | |||
*/ | |||
class SignerV4 { | |||
static final String UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD"; //$NON-NLS-1$ | |||
private static final String ALGORITHM = "HMAC-SHA256"; //$NON-NLS-1$ | |||
private static final String DATE_STRING_FORMAT = "yyyyMMdd"; //$NON-NLS-1$ | |||
private static final String HEX = "0123456789abcdef"; //$NON-NLS-1$ | |||
private static final String HMACSHA256 = "HmacSHA256"; //$NON-NLS-1$ | |||
private static final String ISO8601_BASIC_FORMAT = "yyyyMMdd'T'HHmmss'Z'"; //$NON-NLS-1$ | |||
private static final String S3 = "s3"; //$NON-NLS-1$ | |||
private static final String SCHEME = "AWS4"; //$NON-NLS-1$ | |||
private static final String TERMINATOR = "aws4_request"; //$NON-NLS-1$ | |||
private static final String UTC = "UTC"; //$NON-NLS-1$ | |||
private static final String X_AMZ_ALGORITHM = "X-Amz-Algorithm"; //$NON-NLS-1$ | |||
private static final String X_AMZ_CREDENTIAL = "X-Amz-Credential"; //$NON-NLS-1$ | |||
private static final String X_AMZ_DATE = "X-Amz-Date"; //$NON-NLS-1$ | |||
private static final String X_AMZ_SIGNATURE = "X-Amz-Signature"; //$NON-NLS-1$ | |||
private static final String X_AMZ_SIGNED_HEADERS = "X-Amz-SignedHeaders"; //$NON-NLS-1$ | |||
static final String X_AMZ_CONTENT_SHA256 = "x-amz-content-sha256"; //$NON-NLS-1$ | |||
static final String X_AMZ_EXPIRES = "X-Amz-Expires"; //$NON-NLS-1$ | |||
static final String X_AMZ_STORAGE_CLASS = "x-amz-storage-class"; //$NON-NLS-1$ | |||
/** | |||
* Create an AWSV4 authorization for a request, suitable for embedding in | |||
* query parameters. | |||
* | |||
* @param bucketConfig | |||
* configuration of S3 storage bucket this request should be | |||
* signed for | |||
* @param url | |||
* HTTP request URL | |||
* @param httpMethod | |||
* HTTP method | |||
* @param headers | |||
* The HTTP request headers; 'Host' and 'X-Amz-Date' will be | |||
* added to this set. | |||
* @param queryParameters | |||
* Any query parameters that will be added to the endpoint. The | |||
* parameters should be specified in canonical format. | |||
* @param bodyHash | |||
* Pre-computed SHA256 hash of the request body content; this | |||
* value should also be set as the header 'X-Amz-Content-SHA256' | |||
* for non-streaming uploads. | |||
* @return The computed authorization string for the request. This value | |||
* needs to be set as the header 'Authorization' on the subsequent | |||
* HTTP request. | |||
*/ | |||
static String createAuthorizationQuery(S3Config bucketConfig, URL url, | |||
String httpMethod, Map<String, String> headers, | |||
Map<String, String> queryParameters, String bodyHash) { | |||
addHostHeader(url, headers); | |||
queryParameters.put(X_AMZ_ALGORITHM, SCHEME + "-" + ALGORITHM); //$NON-NLS-1$ | |||
Date now = new Date(); | |||
String dateStamp = dateStamp(now); | |||
String scope = scope(bucketConfig.getRegion(), dateStamp); | |||
queryParameters.put(X_AMZ_CREDENTIAL, | |||
bucketConfig.getAccessKey() + "/" + scope); //$NON-NLS-1$ | |||
String dateTimeStampISO8601 = dateTimeStampISO8601(now); | |||
queryParameters.put(X_AMZ_DATE, dateTimeStampISO8601); | |||
String canonicalizedHeaderNames = canonicalizeHeaderNames(headers); | |||
queryParameters.put(X_AMZ_SIGNED_HEADERS, canonicalizedHeaderNames); | |||
String canonicalizedQueryParameters = canonicalizeQueryString( | |||
queryParameters); | |||
String canonicalizedHeaders = canonicalizeHeaderString(headers); | |||
String canonicalRequest = canonicalRequest(url, httpMethod, | |||
canonicalizedQueryParameters, canonicalizedHeaderNames, | |||
canonicalizedHeaders, bodyHash); | |||
byte[] signature = createSignature(bucketConfig, dateTimeStampISO8601, | |||
dateStamp, scope, canonicalRequest); | |||
queryParameters.put(X_AMZ_SIGNATURE, toHex(signature)); | |||
return formatAuthorizationQuery(queryParameters); | |||
} | |||
private static String formatAuthorizationQuery( | |||
Map<String, String> queryParameters) { | |||
StringBuilder s = new StringBuilder(); | |||
for (String key : queryParameters.keySet()) { | |||
appendQuery(s, key, queryParameters.get(key)); | |||
} | |||
return s.toString(); | |||
} | |||
private static void appendQuery(StringBuilder s, String key, | |||
String value) { | |||
if (s.length() != 0) { | |||
s.append("&"); //$NON-NLS-1$ | |||
} | |||
s.append(key).append("=").append(value); //$NON-NLS-1$ | |||
} | |||
/** | |||
* Sign headers for given bucket, url and HTTP method and add signature in | |||
* Authorization header. | |||
* | |||
* @param bucketConfig | |||
* configuration of S3 storage bucket this request should be | |||
* signed for | |||
* @param url | |||
* HTTP request URL | |||
* @param httpMethod | |||
* HTTP method | |||
* @param headers | |||
* HTTP headers to sign | |||
* @param bodyHash | |||
* Pre-computed SHA256 hash of the request body content; this | |||
* value should also be set as the header 'X-Amz-Content-SHA256' | |||
* for non-streaming uploads. | |||
* @return HTTP headers signd by an Authorization header added to the | |||
* headers | |||
*/ | |||
static Map<String, String> createHeaderAuthorization( | |||
S3Config bucketConfig, URL url, String httpMethod, | |||
Map<String, String> headers, String bodyHash) { | |||
addHostHeader(url, headers); | |||
Date now = new Date(); | |||
String dateTimeStamp = dateTimeStampISO8601(now); | |||
headers.put(X_AMZ_DATE, dateTimeStamp); | |||
String canonicalizedHeaderNames = canonicalizeHeaderNames(headers); | |||
String canonicalizedHeaders = canonicalizeHeaderString(headers); | |||
String canonicalRequest = canonicalRequest(url, httpMethod, "", //$NON-NLS-1$ | |||
canonicalizedHeaderNames, canonicalizedHeaders, bodyHash); | |||
String dateStamp = dateStamp(now); | |||
String scope = scope(bucketConfig.getRegion(), dateStamp); | |||
byte[] signature = createSignature(bucketConfig, dateTimeStamp, | |||
dateStamp, scope, canonicalRequest); | |||
headers.put(HDR_AUTHORIZATION, formatAuthorizationHeader(bucketConfig, | |||
canonicalizedHeaderNames, scope, signature)); // $NON-NLS-1$ | |||
return headers; | |||
} | |||
private static String formatAuthorizationHeader( | |||
S3Config bucketConfig, String canonicalizedHeaderNames, | |||
String scope, byte[] signature) { | |||
StringBuilder s = new StringBuilder(); | |||
s.append(SCHEME).append("-").append(ALGORITHM).append(" "); //$NON-NLS-1$ //$NON-NLS-2$ | |||
s.append("Credential=").append(bucketConfig.getAccessKey()).append("/") //$NON-NLS-1$//$NON-NLS-2$ | |||
.append(scope).append(","); //$NON-NLS-1$ | |||
s.append("SignedHeaders=").append(canonicalizedHeaderNames).append(","); //$NON-NLS-1$ //$NON-NLS-2$ | |||
s.append("Signature=").append(toHex(signature)); //$NON-NLS-1$ | |||
return s.toString(); | |||
} | |||
private static void addHostHeader(URL url, | |||
Map<String, String> headers) { | |||
String hostHeader = url.getHost(); | |||
int port = url.getPort(); | |||
if (port > -1) { | |||
hostHeader.concat(":" + Integer.toString(port)); //$NON-NLS-1$ | |||
} | |||
headers.put("Host", hostHeader); //$NON-NLS-1$ | |||
} | |||
private static String canonicalizeHeaderNames( | |||
Map<String, String> headers) { | |||
List<String> sortedHeaders = new ArrayList<String>(); | |||
sortedHeaders.addAll(headers.keySet()); | |||
Collections.sort(sortedHeaders, String.CASE_INSENSITIVE_ORDER); | |||
StringBuilder buffer = new StringBuilder(); | |||
for (String header : sortedHeaders) { | |||
if (buffer.length() > 0) | |||
buffer.append(";"); //$NON-NLS-1$ | |||
buffer.append(header.toLowerCase()); | |||
} | |||
return buffer.toString(); | |||
} | |||
private static String canonicalizeHeaderString( | |||
Map<String, String> headers) { | |||
if (headers == null || headers.isEmpty()) { | |||
return ""; //$NON-NLS-1$ | |||
} | |||
List<String> sortedHeaders = new ArrayList<String>(); | |||
sortedHeaders.addAll(headers.keySet()); | |||
Collections.sort(sortedHeaders, String.CASE_INSENSITIVE_ORDER); | |||
StringBuilder buffer = new StringBuilder(); | |||
for (String key : sortedHeaders) { | |||
buffer.append(key.toLowerCase().replaceAll("\\s+", " ") + ":" //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ | |||
+ headers.get(key).replaceAll("\\s+", " ")); //$NON-NLS-1$//$NON-NLS-2$ | |||
buffer.append("\n"); //$NON-NLS-1$ | |||
} | |||
return buffer.toString(); | |||
} | |||
private static String dateStamp(Date now) { | |||
// TODO(ms) cache and reuse DateFormat instances | |||
SimpleDateFormat dateStampFormat = new SimpleDateFormat( | |||
DATE_STRING_FORMAT); | |||
dateStampFormat.setTimeZone(new SimpleTimeZone(0, UTC)); | |||
String dateStamp = dateStampFormat.format(now); | |||
return dateStamp; | |||
} | |||
private static String dateTimeStampISO8601(Date now) { | |||
// TODO(ms) cache and reuse DateFormat instances | |||
SimpleDateFormat dateTimeFormat = new SimpleDateFormat( | |||
ISO8601_BASIC_FORMAT); | |||
dateTimeFormat.setTimeZone(new SimpleTimeZone(0, UTC)); | |||
String dateTimeStamp = dateTimeFormat.format(now); | |||
return dateTimeStamp; | |||
} | |||
private static String scope(String region, String dateStamp) { | |||
String scope = String.format("%s/%s/%s/%s", dateStamp, region, S3, //$NON-NLS-1$ | |||
TERMINATOR); | |||
return scope; | |||
} | |||
private static String canonicalizeQueryString( | |||
Map<String, String> parameters) { | |||
if (parameters == null || parameters.isEmpty()) { | |||
return ""; //$NON-NLS-1$ | |||
} | |||
SortedMap<String, String> sorted = new TreeMap<String, String>(); | |||
Iterator<Map.Entry<String, String>> pairs = parameters.entrySet() | |||
.iterator(); | |||
while (pairs.hasNext()) { | |||
Map.Entry<String, String> pair = pairs.next(); | |||
String key = pair.getKey(); | |||
String value = pair.getValue(); | |||
sorted.put(urlEncode(key, false), urlEncode(value, false)); | |||
} | |||
StringBuilder builder = new StringBuilder(); | |||
pairs = sorted.entrySet().iterator(); | |||
while (pairs.hasNext()) { | |||
Map.Entry<String, String> pair = pairs.next(); | |||
builder.append(pair.getKey()); | |||
builder.append("="); //$NON-NLS-1$ | |||
builder.append(pair.getValue()); | |||
if (pairs.hasNext()) { | |||
builder.append("&"); //$NON-NLS-1$ | |||
} | |||
} | |||
return builder.toString(); | |||
} | |||
private static String canonicalRequest(URL endpoint, String httpMethod, | |||
String queryParameters, String canonicalizedHeaderNames, | |||
String canonicalizedHeaders, String bodyHash) { | |||
return String.format("%s\n%s\n%s\n%s\n%s\n%s", //$NON-NLS-1$ | |||
httpMethod, canonicalizeResourcePath(endpoint), | |||
queryParameters, canonicalizedHeaders, canonicalizedHeaderNames, | |||
bodyHash); | |||
} | |||
private static String canonicalizeResourcePath(URL endpoint) { | |||
if (endpoint == null) { | |||
return "/"; //$NON-NLS-1$ | |||
} | |||
String path = endpoint.getPath(); | |||
if (path == null || path.isEmpty()) { | |||
return "/"; //$NON-NLS-1$ | |||
} | |||
String encodedPath = urlEncode(path, true); | |||
if (encodedPath.startsWith("/")) { //$NON-NLS-1$ | |||
return encodedPath; | |||
} else { | |||
return "/" + encodedPath; //$NON-NLS-1$ | |||
} | |||
} | |||
private static byte[] hash(String s) { | |||
MessageDigest md = Constants.newMessageDigest(); | |||
md.update(s.getBytes(StandardCharsets.UTF_8)); | |||
return md.digest(); | |||
} | |||
private static byte[] sign(String stringData, byte[] key) { | |||
try { | |||
byte[] data = stringData.getBytes("UTF-8"); //$NON-NLS-1$ | |||
Mac mac = Mac.getInstance(HMACSHA256); | |||
mac.init(new SecretKeySpec(key, HMACSHA256)); | |||
return mac.doFinal(data); | |||
} catch (Exception e) { | |||
throw new RuntimeException(MessageFormat.format( | |||
LfsServerText.get().failedToCalcSignature, e.getMessage()), | |||
e); | |||
} | |||
} | |||
private static String stringToSign(String scheme, String algorithm, | |||
String dateTime, String scope, String canonicalRequest) { | |||
return String.format("%s-%s\n%s\n%s\n%s", //$NON-NLS-1$ | |||
scheme, algorithm, dateTime, scope, | |||
toHex(hash(canonicalRequest))); | |||
} | |||
private static String toHex(byte[] bytes) { | |||
StringBuilder builder = new StringBuilder(2 * bytes.length); | |||
for (byte b : bytes) { | |||
builder.append(HEX.charAt((b & 0xF0) >> 4)); | |||
builder.append(HEX.charAt(b & 0xF)); | |||
} | |||
return builder.toString(); | |||
} | |||
private static String urlEncode(String url, boolean keepPathSlash) { | |||
String encoded; | |||
try { | |||
encoded = URLEncoder.encode(url, StandardCharsets.UTF_8.name()); | |||
} catch (UnsupportedEncodingException e) { | |||
throw new RuntimeException(LfsServerText.get().unsupportedUtf8, e); | |||
} | |||
if (keepPathSlash) { | |||
encoded = encoded.replace("%2F", "/"); //$NON-NLS-1$ //$NON-NLS-2$ | |||
} | |||
return encoded; | |||
} | |||
private static byte[] createSignature(S3Config bucketConfig, | |||
String dateTimeStamp, String dateStamp, | |||
String scope, String canonicalRequest) { | |||
String stringToSign = stringToSign(SCHEME, ALGORITHM, dateTimeStamp, | |||
scope, canonicalRequest); | |||
byte[] signature = (SCHEME + bucketConfig.getSecretKey()).getBytes(); | |||
signature = sign(dateStamp, signature); | |||
signature = sign(bucketConfig.getRegion(), signature); | |||
signature = sign(S3, signature); | |||
signature = sign(TERMINATOR, signature); | |||
signature = sign(stringToSign, signature); | |||
return signature; | |||
} | |||
} |
@@ -42,6 +42,7 @@ Import-Package: javax.servlet;version="[3.1.0,4.0.0)", | |||
org.eclipse.jgit.lfs.lib;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lfs.server;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lfs.server.fs;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lfs.server.s3;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.lib;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.merge;version="[4.3.0,4.4.0)", | |||
org.eclipse.jgit.nls;version="[4.3.0,4.4.0)", |
@@ -74,6 +74,12 @@ invalidHttpProxyOnlyHttpSupported=Invalid http_proxy: {0}: Only http supported. | |||
jgitVersion=jgit version {0} | |||
lineFormat={0} | |||
listeningOn=Listening on {0} | |||
lfsNoAccessKey=No accessKey in {0} | |||
lfsNoSecretKey=No secretKey in {0} | |||
lfsProtocolUrl=LFS protocol URL: {0} | |||
lfsStoreDirectory=LFS objects stored in: {0} | |||
lfsStoreUrl=LFS store URL: {0} | |||
lfsUnknownStoreType=Unknown LFS store type: {0}
mergeConflict=CONFLICT(content): Merge conflict in {0} | |||
mergeCheckoutConflict=error: Your local changes to the following files would be overwritten by merge: | |||
mergeFailed=Automatic merge failed; fix conflicts and then commit the result | |||
@@ -107,6 +113,7 @@ metaVar_file=FILE | |||
metaVar_filepattern=filepattern | |||
metaVar_gitDir=GIT_DIR | |||
metaVar_hostName=HOSTNAME | |||
metaVar_lfsStorage=STORAGE | |||
metaVar_linesOfContext=lines | |||
metaVar_message=message | |||
metaVar_n=n | |||
@@ -123,9 +130,11 @@ metaVar_refs=REFS | |||
metaVar_refspec=refspec | |||
metaVar_remoteName=name | |||
metaVar_revision=REVISION | |||
metaVar_s3Bucket=BUCKET | |||
metaVar_s3Region=REGION | |||
metaVar_s3StorageClass=STORAGE-CLASS | |||
metaVar_seconds=SECONDS | |||
metaVar_service=SERVICE
metaVar_treeish=tree-ish | |||
metaVar_uriish=uri-ish | |||
metaVar_url=URL | |||
@@ -169,6 +178,7 @@ remoteMessage=remote: {0} | |||
remoteRefObjectChangedIsNotExpectedOne=remote ref object changed - is not expected one {0} | |||
remoteSideDoesNotSupportDeletingRefs=remote side does not support deleting refs | |||
repaint=Repaint | |||
s3InvalidBucket=Invalid S3 bucket ''{0}'' | |||
serviceNotSupported=Service ''{0}'' not supported | |||
skippingObject=skipping {0} {1} | |||
statusFileListFormat=\t%1$s | |||
@@ -221,7 +231,7 @@ usage_Glog=View commit history as a graph | |||
usage_IndexPack=Build pack index file for an existing packed archive | |||
usage_LFSDirectory=Directory to store large objects | |||
usage_LFSPort=Server http port | |||
usage_LFSRunStore=fs: store lfs objects in file system | |||
usage_LFSRunStore=Store (fs | s3), store lfs objects in file system or Amazon S3 | |||
usage_LFSStoreUrl=URL of the LFS store | |||
usage_LongFormat=Always output the long format | |||
usage_LsRemote=List references in a remote repository | |||
@@ -240,6 +250,12 @@ usage_RepositoryToReceiveInto=Repository to receive into | |||
usage_RevList=List commit objects in reverse chronological order | |||
usage_RevParse=Pick out and massage parameters | |||
usage_RevParseAll=Show all refs found in refs/ | |||
usage_S3Bucket=S3 bucket name | |||
usage_S3Expiration=Authorization validity in seconds, default 600 sec
usage_S3Region=S3 region (us-east-1 | us-west-1 | us-west-2 | eu-west-1 |\ | |||
eu-central-1 | ap-southeast-1 | ap-southeast-2 | ap-northeast-1 |\
ap-northeast-2 | sa-east-1), see http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region | |||
usage_S3StorageClass=S3 storage class (REDUCED_REDUNDANCY | STANDARD), default REDUCED_REDUNDANCY | |||
usage_ServerSideBackendForJgitFetch=Server side backend for 'jgit fetch' | |||
usage_ServerSideBackendForJgitPush=Server side backend for 'jgit push' | |||
usage_ShowCacheTree=Show cache tree | |||
@@ -353,6 +369,7 @@ usage_resetHard=Resets the index and working tree | |||
usage_resetSoft=Resets without touching the index file nor the working tree | |||
usage_resetMixed=Resets the index but not the working tree | |||
usage_runLfsStore=Run LFS Store in a given directory | |||
usage_S3NoSslVerify=Skip verification of Amazon server certificate and hostname | |||
usage_setTheGitRepositoryToOperateOn=set the git repository to operate on | |||
usage_show=display one commit | |||
usage_showRefNamesMatchingCommits=Show ref names matching commits |
@@ -43,12 +43,15 @@ | |||
package org.eclipse.jgit.pgm.debug; | |||
import java.io.File; | |||
import java.io.IOException; | |||
import java.net.InetAddress; | |||
import java.net.URI; | |||
import java.net.URISyntaxException; | |||
import java.net.UnknownHostException; | |||
import java.nio.file.Path; | |||
import java.nio.file.Paths; | |||
import java.text.MessageFormat; | |||
import org.eclipse.jetty.server.Connector; | |||
import org.eclipse.jetty.server.HttpConfiguration; | |||
@@ -58,12 +61,18 @@ import org.eclipse.jetty.server.ServerConnector; | |||
import org.eclipse.jetty.server.handler.ContextHandlerCollection; | |||
import org.eclipse.jetty.servlet.ServletContextHandler; | |||
import org.eclipse.jetty.servlet.ServletHolder; | |||
import org.eclipse.jgit.errors.ConfigInvalidException; | |||
import org.eclipse.jgit.lfs.server.LargeFileRepository; | |||
import org.eclipse.jgit.lfs.server.LfsProtocolServlet; | |||
import org.eclipse.jgit.lfs.server.fs.FileLfsServlet; | |||
import org.eclipse.jgit.lfs.server.fs.FileLfsRepository; | |||
import org.eclipse.jgit.lfs.server.s3.S3Config; | |||
import org.eclipse.jgit.lfs.server.s3.S3Repository; | |||
import org.eclipse.jgit.pgm.Command; | |||
import org.eclipse.jgit.pgm.TextBuiltin; | |||
import org.eclipse.jgit.pgm.internal.CLIText; | |||
import org.eclipse.jgit.storage.file.FileBasedConfig; | |||
import org.eclipse.jgit.util.FS; | |||
import org.kohsuke.args4j.Argument; | |||
import org.kohsuke.args4j.Option; | |||
@@ -153,7 +162,11 @@ class LfsStore extends TextBuiltin { | |||
} | |||
private static enum StoreType { | |||
FS; | |||
FS, S3; | |||
} | |||
private static enum StorageClass { | |||
REDUCED_REDUNDANCY, STANDARD | |||
} | |||
private static final String OBJECTS = "objects/"; //$NON-NLS-1$ | |||
@@ -162,8 +175,8 @@ class LfsStore extends TextBuiltin { | |||
private static final String PROTOCOL_PATH = "/lfs/objects/batch"; //$NON-NLS-1$ | |||
@Option(name = "--port", aliases = {"-p" }, metaVar = "metaVar_port", | |||
usage = "usage_LFSPort") | |||
@Option(name = "--port", aliases = {"-p" }, | |||
metaVar = "metaVar_port", usage = "usage_LFSPort") | |||
int port; | |||
@Option(name = "--store", metaVar = "metaVar_lfsStorage", usage = "usage_LFSRunStore") | |||
@@ -173,6 +186,25 @@ class LfsStore extends TextBuiltin { | |||
usage = "usage_LFSStoreUrl") | |||
String storeUrl; | |||
@Option(name = "--region", aliases = {"-r" }, | |||
metaVar = "metaVar_s3Region", usage = "usage_S3Region") | |||
String region; // $NON-NLS-1$ | |||
@Option(name = "--bucket", aliases = {"-b" }, | |||
metaVar = "metaVar_s3Bucket", usage = "usage_S3Bucket") | |||
String bucket; // $NON-NLS-1$ | |||
@Option(name = "--storage-class", aliases = {"-c" }, | |||
metaVar = "metaVar_s3StorageClass", usage = "usage_S3StorageClass") | |||
StorageClass storageClass = StorageClass.REDUCED_REDUNDANCY; | |||
@Option(name = "--expire", aliases = {"-e" }, | |||
metaVar = "metaVar_seconds", usage = "usage_S3Expiration") | |||
int expirationSeconds = 600; | |||
@Option(name = "--no-ssl-verify", usage = "usage_S3NoSslVerify") | |||
boolean disableSslVerify = false; | |||
@Argument(required = false, metaVar = "metaVar_directory", usage = "usage_LFSDirectory") | |||
String directory; | |||
@@ -203,9 +235,17 @@ class LfsStore extends TextBuiltin { | |||
repository = fsRepo; | |||
break; | |||
case S3: | |||
readAWSKeys(); | |||
checkOptions(); | |||
S3Config config = new S3Config(region.toString(), bucket, | |||
storageClass.toString(), accessKey, secretKey, | |||
expirationSeconds, disableSslVerify); | |||
repository = new S3Repository(config); | |||
break; | |||
default: | |||
throw new IllegalArgumentException( | |||
"Unknown store type: " + storeType); //$NON-NLS-1$ | |||
throw new IllegalArgumentException(MessageFormat | |||
.format(CLIText.get().lfsUnknownStoreType, storeType)); | |||
} | |||
LfsProtocolServlet protocol = new LfsProtocolServlet() { | |||
@@ -222,10 +262,38 @@ class LfsStore extends TextBuiltin { | |||
server.start(); | |||
outw.println("LFS protocol URL: " + getProtocolUrl(baseURI)); //$NON-NLS-1$ | |||
outw.println(MessageFormat.format(CLIText.get().lfsProtocolUrl, | |||
getProtocolUrl(baseURI))); | |||
if (storeType == StoreType.FS) { | |||
outw.println("LFS objects located in: " + directory); //$NON-NLS-1$ | |||
outw.println("LFS store URL: " + getStoreUrl(baseURI)); //$NON-NLS-1$ | |||
outw.println(MessageFormat.format(CLIText.get().lfsStoreDirectory, | |||
directory)); | |||
outw.println(MessageFormat.format(CLIText.get().lfsStoreUrl, | |||
getStoreUrl(baseURI))); | |||
} | |||
} | |||
	/**
	 * Validates the S3-specific command line options before the S3
	 * configuration is built.
	 *
	 * @throws Die
	 *             if no bucket name was given
	 */
	private void checkOptions() {
		// A missing or empty --bucket makes any S3 request impossible.
		// NOTE(review): the region option is not validated here although the
		// caller reads it unchecked — confirm whether it needs a check too.
		if (bucket == null || bucket.length() == 0) {
			throw die(MessageFormat.format(CLIText.get().s3InvalidBucket,
					bucket));
		}
	}
	/**
	 * Reads the AWS access and secret keys from the credentials file
	 * {@code ~/.aws/credentials}, section {@code [default]}, into the
	 * {@code accessKey} and {@code secretKey} fields.
	 *
	 * @throws IOException
	 *             if the credentials file cannot be read
	 * @throws ConfigInvalidException
	 *             if the credentials file is not a valid config file
	 */
	private void readAWSKeys() throws IOException, ConfigInvalidException {
		String credentialsPath = System.getProperty("user.home") //$NON-NLS-1$
				+ "/.aws/credentials"; //$NON-NLS-1$
		FileBasedConfig c = new FileBasedConfig(new File(credentialsPath),
				FS.DETECTED);
		c.load();
		// NOTE(review): the AWS CLI writes keys named 'aws_access_key_id' /
		// 'aws_secret_access_key'; this reads 'accessKey' / 'secretKey' —
		// confirm the intended credentials file format.
		accessKey = c.getString("default", null, "accessKey"); //$NON-NLS-1$//$NON-NLS-2$
		secretKey = c.getString("default", null, "secretKey"); //$NON-NLS-1$ //$NON-NLS-2$
		if (accessKey == null || accessKey.isEmpty()) {
			throw die(MessageFormat.format(CLIText.get().lfsNoAccessKey,
					credentialsPath));
		}
		if (secretKey == null || secretKey.isEmpty()) {
			throw die(MessageFormat.format(CLIText.get().lfsNoSecretKey,
					credentialsPath));
		}
	}
@@ -150,6 +150,12 @@ public class CLIText extends TranslationBundle { | |||
/***/ public String initializedEmptyGitRepositoryIn; | |||
/***/ public String invalidHttpProxyOnlyHttpSupported; | |||
/***/ public String jgitVersion; | |||
/***/ public String lfsNoAccessKey; | |||
/***/ public String lfsNoSecretKey; | |||
/***/ public String lfsProtocolUrl; | |||
/***/ public String lfsStoreDirectory; | |||
/***/ public String lfsStoreUrl; | |||
/***/ public String lfsUnknownStoreType; | |||
/***/ public String lineFormat; | |||
/***/ public String listeningOn; | |||
/***/ public String mergeCheckoutConflict; | |||
@@ -178,6 +184,7 @@ public class CLIText extends TranslationBundle { | |||
/***/ public String metaVar_filepattern; | |||
/***/ public String metaVar_gitDir; | |||
/***/ public String metaVar_hostName; | |||
/***/ public String metaVar_lfsStorage; | |||
/***/ public String metaVar_linesOfContext; | |||
/***/ public String metaVar_message; | |||
/***/ public String metaVar_n; | |||
@@ -192,6 +199,9 @@ public class CLIText extends TranslationBundle { | |||
/***/ public String metaVar_refs; | |||
/***/ public String metaVar_refspec; | |||
/***/ public String metaVar_remoteName; | |||
/***/ public String metaVar_s3Bucket; | |||
/***/ public String metaVar_s3Region; | |||
/***/ public String metaVar_s3StorageClass; | |||
/***/ public String metaVar_seconds; | |||
/***/ public String metaVar_service; | |||
/***/ public String metaVar_treeish; | |||
@@ -237,6 +247,7 @@ public class CLIText extends TranslationBundle { | |||
/***/ public String remoteRefObjectChangedIsNotExpectedOne; | |||
/***/ public String remoteSideDoesNotSupportDeletingRefs; | |||
/***/ public String repaint; | |||
/***/ public String s3InvalidBucket; | |||
/***/ public String serviceNotSupported; | |||
/***/ public String skippingObject; | |||
/***/ public String statusFileListFormat; |