@@ -34,8 +34,8 @@ mvn -DskipTests=true -Dcheckstyle.skip=true compile && rsync -avzc ./target/clas
if [[ ! -z "${NO_RESTART}" && "${NO_RESTART}" == "norestart" ]] ; then
echo "Patching but not restarting..."
ssh ${HOST} "cd /tmp && cp ~bubble/current/bubble.jar . && cd classes && jar uvf ../bubble.jar . | egrep -v '*/\(*' && cat ../bubble.jar > ~bubble/current/bubble.jar" || die "Error patching remote jar"
ssh ${HOST} "cd /tmp && cp ~bubble/api/bubble.jar . && cd classes && jar uvf ../bubble.jar . | egrep -v '*/\(*' && cat ../bubble.jar > ~bubble/api/bubble.jar" || die "Error patching remote jar"
else
echo "Patching and restarting..."
ssh ${HOST} "cd /tmp && cp ~bubble/current/bubble.jar . && cd classes && jar uvf ../bubble.jar . | egrep -v '*/\(*' && cat ../bubble.jar > ~bubble/current/bubble.jar && supervisorctl restart bubble" || die "Error patching remote jar"
ssh ${HOST} "cd /tmp && cp ~bubble/api/bubble.jar . && cd classes && jar uvf ../bubble.jar . | egrep -v '*/\(*' && cat ../bubble.jar > ~bubble/api/bubble.jar && supervisorctl restart bubble" || die "Error patching remote jar"
fi
@@ -51,8 +51,8 @@ fi
if [[ ! -z "${NO_RESTART}" && "${NO_RESTART}" == "norestart" ]] ; then
echo "Patching but not restarting..."
ssh ${HOST} "cat /tmp/bubble.jar > ~bubble/current/bubble.jar"
ssh ${HOST} "cat /tmp/bubble.jar > ~bubble/api/bubble.jar"
else
echo "Patching and restarting..."
ssh ${HOST} "cat /tmp/bubble.jar > ~bubble/current/bubble.jar && supervisorctl restart bubble"
ssh ${HOST} "cat /tmp/bubble.jar > ~bubble/api/bubble.jar && supervisorctl restart bubble"
fi
@@ -17,7 +17,7 @@
# BUBBLE_API : which API to use. Default is local (http://127.0.0.1:PORT, where PORT is found in .bubble.env)
# BUBBLE_USER : account to use. Default is root
# BUBBLE_PASS : password for account. Default is root
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/current/bubble.env (whichever is found first)
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/api/bubble.env (whichever is found first)
# DEBUG_PORT : if set, this is the port number the client will wait for a debugger to attach before starting
# BUBBLE_INCLUDE : when using the sync-model and run-script commands, this is the directory to find included files
# For sync-model and migrate-model, the default is the current directory.
@@ -62,8 +62,8 @@ fi
if [[ -z "${BUBBLE_ENV}" ]] ; then
if [[ -f "${HOME}/.bubble.env" ]] ; then
BUBBLE_ENV="${HOME}/.bubble.env"
elif [[ -f "/home/bubble/current/bubble.env" ]] ; then
BUBBLE_ENV="/home/bubble/current/bubble.env"
elif [[ -f "/home/bubble/api/bubble.env" ]] ; then
BUBBLE_ENV="/home/bubble/api/bubble.env"
else
die "bubble environment file not found"
fi
@@ -87,10 +87,10 @@ if [[ -z "${BUBBLE_USER}" ]] ; then
fi
if [[ -z "${BUBBLE_JAR}" ]] ; then
if [[ -f "${HOME}/current/bubble.jar" ]] ; then
BUBBLE_JAR="${HOME}/current/bubble.jar"
elif [[ -f "/home/bubble/current/bubble.jar" ]] ; then
BUBBLE_JAR="/home/bubble/current/bubble.jar"
if [[ -f "${HOME}/api/bubble.jar" ]] ; then
BUBBLE_JAR="${HOME}/api/bubble.jar"
elif [[ -f "/home/bubble/api/bubble.jar" ]] ; then
BUBBLE_JAR="/home/bubble/api/bubble.jar"
elif [[ -f "${BUBBLE_SCRIPTS}/../bubble.jar" ]] ; then
BUBBLE_JAR="${BUBBLE_SCRIPTS}/../bubble.jar"
else
@@ -13,7 +13,7 @@
#
# Environment variables
#
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/current/bubble.env (whichever is found first)
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/api/bubble.env (whichever is found first)
#
SCRIPT="${0}"
SCRIPT_DIR=$(cd $(dirname ${SCRIPT}) && pwd)
@@ -7,7 +7,7 @@
#
# Environment variables
#
# BUBBLE_ENV : env file to load. Default is ~/.bubble-test.env or /home/bubble/current/bubble-test.env (whichever is found first)
# BUBBLE_ENV : env file to load. Default is ~/.bubble-test.env or /home/bubble/api/bubble-test.env (whichever is found first)
# AWS_ACCESS_KEY_ID : AWS access key ID
# AWS_SECRET_KEY : AWS secret access key
@@ -17,8 +17,8 @@ SCRIPT_DIR=$(cd $(dirname ${SCRIPT}) && pwd)
if [[ -z "${BUBBLE_ENV}" ]] ; then
if [[ -f "${HOME}/.bubble-test.env" ]] ; then
BUBBLE_ENV="${HOME}/.bubble-test.env"
elif [[ -f "/home/bubble/current/.bubble-test.env" ]] ; then
BUBBLE_ENV="/home/bubble/current/.bubble-test.env"
elif [[ -f "/home/bubble/api/.bubble-test.env" ]] ; then
BUBBLE_ENV="/home/bubble/api/.bubble-test.env"
else
die "bubble environment file not found"
fi
@@ -25,7 +25,7 @@
# BUBBLE_API : which API to use. Default is local (http://127.0.0.1:PORT, where PORT is found in .bubble.env)
# BUBBLE_USER : account to use. Default is root
# BUBBLE_PASS : password for account. Default is root
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/current/bubble.env (whichever is found first)
# BUBBLE_ENV : env file to load. Default is ~/.bubble.env or /home/bubble/api/bubble.env (whichever is found first)
# DEBUG_PORT : if set, this is the port number the client will wait for a debugger to attach before starting
# BUBBLE_INCLUDE : when using the sync-model and run-script commands, this is the directory to find included files
# For sync-model and migrate-model, the default is the current directory.
@@ -54,7 +54,7 @@ fi
if [[ -z "${BUBBLE_ENV}" ]] ; then
BUBBLE_ENV="${HOME}/.bubble.env"
if [[ ! -f "${BUBBLE_ENV}" ]] ; then
BUBBLE_ENV="/home/bubble/current/bubble.env"
BUBBLE_ENV="/home/bubble/api/bubble.env"
fi
fi
if [[ -f ${BUBBLE_ENV} ]] ; then
@@ -8,10 +8,9 @@ import bubble.cloud.CloudRegion;
import bubble.cloud.CloudServiceDriverBase;
import bubble.dao.cloud.BubbleNodeDAO;
import bubble.model.cloud.BubbleNode;
import bubble.service.packer.PackerService;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.cobbzilla.util.http.HttpRequestBean;
import org.cobbzilla.util.http.HttpResponseBean;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
@@ -19,9 +18,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import static bubble.model.cloud.BubbleNode.TAG_SSH_KEY_ID;
import static org.cobbzilla.util.daemon.ZillaRuntime.die;
import static org.cobbzilla.util.security.RsaKeyPair.newRsaKeyPair;
@Slf4j
public abstract class ComputeServiceDriverBase
@@ -44,24 +41,7 @@ public abstract class ComputeServiceDriverBase
}
@Autowired protected BubbleNodeDAO nodeDAO;
public String registerSshKey(BubbleNode node) {
if (node.hasSshKey()) return die("registerSshKey: node already has a key: "+node.getUuid());
node.setSshKey(newRsaKeyPair());
final HttpRequestBean keyRequest = registerSshKeyRequest(node);
final HttpResponseBean keyResponse = keyRequest.curl(); // fixme: we can do better than shelling to curl
if (keyResponse.getStatus() != 200) return die("start: error creating SSH key: " + keyResponse);
final String keyId = readSshKeyId(keyResponse);
node.setTag(TAG_SSH_KEY_ID, keyId);
nodeDAO.update(node);
return keyId;
}
protected abstract String readSshKeyId(HttpResponseBean keyResponse);
protected abstract HttpRequestBean registerSshKeyRequest(BubbleNode node);
@Autowired protected PackerService packerService;
public abstract List<BubbleNode> listNodes() throws IOException;
@@ -5,16 +5,19 @@ import static bubble.service.packer.PackerJob.PACKER_IMAGE_PREFIX;
public abstract class PackerImageParserBase extends ListResourceParser<PackerImage> {
private String bubbleVersion;
private String keyHash;
private String jarSha;
public PackerImageParserBase(String bubbleVersion, String jarSha) {
public PackerImageParserBase(String bubbleVersion, String keyHash, String jarSha) {
this.bubbleVersion = bubbleVersion;
this.keyHash = keyHash;
this.jarSha = jarSha;
}
public boolean isValidPackerImage(String name) {
if (!name.startsWith(PACKER_IMAGE_PREFIX)) return false;
if (!name.contains("_"+bubbleVersion+"_")) return false;
if (!name.contains("_"+keyHash+"_")) return false;
// if (!name.endsWith("_"+jarSha)) return false;
return true;
}
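For illustration, a minimal sketch of what the widened validator accepts — the name layout follows the PACKER_IMAGE_NAME_TEMPLATE introduced later in this change (prefix, install type, key hash, version, jar sha); the hash and sha values below are invented:

    // hypothetical image name as PackerJob would now build it
    final String name = "packer_bubble_sage_3f9c2ab1_0.10.0_77aa01de";
    final boolean valid = name.startsWith("packer_bubble_")   // PACKER_IMAGE_PREFIX
            && name.contains("_" + "0.10.0" + "_")            // bubbleVersion segment
            && name.contains("_" + "3f9c2ab1" + "_");         // keyHash segment
    // valid == true; the jarSha suffix check stays commented out, so a name built
    // from a different jar but the same version and key still validates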
@@ -15,10 +15,12 @@ import org.cobbzilla.util.http.HttpRequestBean;
import org.cobbzilla.util.http.HttpResponseBean;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static bubble.model.cloud.BubbleNode.TAG_INSTANCE_ID;
import static bubble.model.cloud.BubbleNode.TAG_SSH_KEY_ID;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.http.HttpHeaders.AUTHORIZATION;
@@ -126,17 +128,6 @@ public class DigitalOceanDriver extends ComputeServiceDriverBase {
return json(response.getEntityString(), clazz, FULL_MAPPER_ALLOW_UNKNOWN_FIELDS);
}
@Override protected HttpRequestBean registerSshKeyRequest(BubbleNode node) {
final Map<String, String> key = new LinkedHashMap<>();
key.put("name", node.getUuid()+"_"+now());
key.put("public_key", node.getSshKey().getSshPublicKey());
return postRequest("account/keys", json(key));
}
@Override protected String readSshKeyId(HttpResponseBean keyResponse) {
return ""+json(keyResponse.getEntityString(), JsonNode.class, FULL_MAPPER_ALLOW_UNKNOWN_FIELDS).get("ssh_key").get("id").intValue();
}
@Override public List<BubbleNode> listNodes() throws IOException { return listNodes(TAG_PREFIX_CLOUD+cloud.getUuid()); }
public List<BubbleNode> listNodes(String tag) throws IOException {
@@ -181,7 +172,6 @@ public class DigitalOceanDriver extends ComputeServiceDriverBase {
// todo: lookup image based on node installType and region
final String os = getOs().getName();
final String sshKeyId = registerSshKey(node);
final CreateDropletRequest createRequest = new CreateDropletRequest()
.setName(node.getFqdn())
@@ -192,7 +182,6 @@ public class DigitalOceanDriver extends ComputeServiceDriverBase {
.setBackups(false)
.setMonitoring(false)
.setPrivate_networking(false)
.setSsh_keys(new Integer[] {Integer.valueOf(sshKeyId)})
.setTags(new String[] {TAG_PREFIX_CLOUD+cloud.getUuid(), TAG_PREFIX_NODE+node.getUuid()});
final CreateDropletResponse droplet = doPost("droplets", json(createRequest), CreateDropletResponse.class);
@@ -223,21 +212,7 @@ public class DigitalOceanDriver extends ComputeServiceDriverBase {
return node;
}
@Override public BubbleNode cleanupStart(BubbleNode node) throws Exception {
if (node.hasTag(TAG_SSH_KEY_ID)) {
final String keyId = node.getTag(TAG_SSH_KEY_ID);
final HttpRequestBean destroyKeyRequest = auth(new HttpRequestBean()
.setMethod(DELETE)
.setUri(DO_API_BASE+"account/keys/"+keyId));
// destroy key, check response
final HttpResponseBean destroyKeyResponse = getResponse(destroyKeyRequest);
if (destroyKeyResponse.getStatus() != NO_CONTENT) {
log.warn("cleanupStart: error destroying sshkey: "+ keyId);
}
}
return node;
}
@Override public BubbleNode cleanupStart(BubbleNode node) { return node; }
@Override public BubbleNode stop(BubbleNode node) throws Exception {
cleanupStart(node); // just in case the key is still around
@@ -258,7 +233,7 @@ public class DigitalOceanDriver extends ComputeServiceDriverBase {
}
@Override public List<PackerImage> getPackerImages() {
final List<PackerImage> images = getResources(PACKER_IMAGES_URI, new DigitalOceanPackerImageParser(configuration.getVersion(), configuration.getJarSha()));
final List<PackerImage> images = getResources(PACKER_IMAGES_URI, new DigitalOceanPackerImageParser(configuration.getVersion(), packerService.getPackerPublicKeyHash(), configuration.getJarSha()));
return images == null ? Collections.emptyList() : images;
}
@@ -10,8 +10,8 @@ import java.util.List;
public class DigitalOceanPackerImageParser extends PackerImageParserBase {
public DigitalOceanPackerImageParser (String bubbleVersion, String jarSha) {
super(bubbleVersion, jarSha);
public DigitalOceanPackerImageParser (String bubbleVersion, String keyHash, String jarSha) {
super(bubbleVersion, keyHash, jarSha);
}
@Override public boolean allowEmpty() { return true; }
@@ -21,8 +21,6 @@ import com.amazonaws.services.ec2.model.*;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.cobbzilla.util.daemon.AwaitResult;
import org.cobbzilla.util.http.HttpRequestBean;
import org.cobbzilla.util.http.HttpResponseBean;
import java.io.IOException;
import java.util.*;
@@ -31,13 +29,12 @@ import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static bubble.model.cloud.BubbleNode.*;
import static bubble.model.cloud.BubbleNode.TAG_INSTANCE_ID;
import static bubble.model.cloud.BubbleNode.TAG_TEST;
import static org.cobbzilla.util.daemon.Await.awaitAll;
import static org.cobbzilla.util.daemon.DaemonThreadFactory.fixedPool;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
import static org.cobbzilla.util.daemon.ZillaRuntime.notSupported;
import static org.cobbzilla.util.http.HttpStatusCodes.OK;
import static org.cobbzilla.util.security.RsaKeyPair.newRsaKeyPair;
import static org.cobbzilla.wizard.resources.ResourceUtil.notFoundEx;
@Slf4j
@@ -90,15 +87,6 @@ public class AmazonEC2Driver extends ComputeServiceDriverBase {
return ec2client;
}
@Override protected String readSshKeyId(HttpResponseBean keyResponse) {
throw new UnsupportedOperationException();
}
@Override protected HttpRequestBean registerSshKeyRequest(BubbleNode node) {
throw new UnsupportedOperationException();
}
@Override public List<BubbleNode> listNodes() throws IOException {
List<Future<?>> listNodeJobs = new ArrayList<>();
for (final String regionName : getEc2ClientMap().keySet()) {
@@ -171,8 +159,6 @@ public class AmazonEC2Driver extends ComputeServiceDriverBase {
.filter(s -> s.getAvailableIpAddressCount() != 0)
.findAny().orElseThrow(() -> new NoSuchElementException("Subnet not found")).getSubnetId();
final String keyId = registerSshKey(node);
final RunInstancesRequest runInstancesRequest = new RunInstancesRequest().withImageId(config.getConfig("imageId"))
.withInstanceType(size.getInternalName())
.withMinCount(MIN_COUNT)
@@ -193,7 +179,6 @@ public class AmazonEC2Driver extends ComputeServiceDriverBase {
final String instanceId = runInstancesResult.getReservation().getInstances().get(0).getInstanceId();
node.setState(BubbleNodeState.booting);
node.setTag(TAG_INSTANCE_ID, instanceId).setTag(TAG_SSH_KEY_ID, keyId);
nodeDAO.update(node);
// Describe instances to check run instance result and get IP addresses
@@ -249,14 +234,9 @@ public class AmazonEC2Driver extends ComputeServiceDriverBase {
return node;
}
@Override public BubbleNode cleanupStart(BubbleNode node) throws Exception {
deleteEC2KeyPair(node);
return node;
}
@Override public BubbleNode cleanupStart(BubbleNode node) { return node; }
@Override public BubbleNode stop(BubbleNode node) throws Exception {
deleteEC2KeyPair(node); // just in case
if (!node.hasTag(TAG_INSTANCE_ID)) {
throw notFoundEx(node.id());
}
@@ -294,39 +274,6 @@ public class AmazonEC2Driver extends ComputeServiceDriverBase {
return node;
}
private void deleteEC2KeyPair(BubbleNode node) throws Exception {
if (node.hasTag(TAG_SSH_KEY_ID)) {
final DeleteKeyPairRequest request = new DeleteKeyPairRequest()
.withKeyName(KEY_NAME_PREFIX + node.getUuid());
// destroy key
final AmazonEC2 ec2Client = getEC2Client(node.getRegion());
try {
ec2Client.deleteKeyPair(request);
} catch (AmazonServiceException e) {
log.warn("deleteEC2KeyPair: error deleting EC2keyPair, node: "
+ node.getUuid() + e.getErrorMessage() + e.getErrorCode());
}
}
}
@Override public String registerSshKey(BubbleNode node) {
if (node.hasSshKey()) return die("registerSshKey: node already has a key: "+node.getUuid());
node.setSshKey(newRsaKeyPair());
final AmazonEC2 ec2Client = getEC2Client(node.getRegion());
final ImportKeyPairRequest importKeyPairRequest = new ImportKeyPairRequest(
KEY_NAME_PREFIX + node.getUuid(), node.getSshKey().getSshPublicKey()
);
final ImportKeyPairResult importKeyPairResult = ec2Client.importKeyPair(importKeyPairRequest);
if (importKeyPairResult.getSdkHttpMetadata().getHttpStatusCode() != OK) {
return die("registerSshKey: error creating keyPair: "
+ importKeyPairResult.getSdkHttpMetadata().getAllHttpHeaders());
}
return importKeyPairResult.getKeyFingerprint();
}
// todo
@Override public List<PackerImage> getPackerImages() { return notSupported("getPackerImages"); }
@@ -10,15 +10,15 @@ import bubble.cloud.geoLocation.mock.MockGeoLocationDriver;
import bubble.model.cloud.BubbleNode;
import bubble.model.cloud.BubbleNodeState;
import lombok.Getter;
import org.cobbzilla.util.http.HttpRequestBean;
import org.cobbzilla.util.http.HttpResponseBean;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static java.util.Collections.singletonList;
import static org.cobbzilla.util.daemon.ZillaRuntime.now;
public class MockComputeDriver extends ComputeServiceDriverBase {
@@ -35,12 +35,6 @@ public class MockComputeDriver extends ComputeServiceDriverBase {
@Getter private final List<OsImage> cloudOsImages = singletonList(new OsImage().setName("dummy operating system"));
@Override protected String readSshKeyId(HttpResponseBean keyResponse) { return "dummy_ssh_key_id_"+now(); }
@Override public String registerSshKey(BubbleNode node) { return readSshKeyId(null); }
@Override protected HttpRequestBean registerSshKeyRequest(BubbleNode node) { return null; }
@Override public BubbleNode start(BubbleNode node) throws Exception {
node.setIp4("127.0.0.1");
node.setIp6("::1");
@@ -24,19 +24,17 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import static bubble.model.cloud.BubbleNode.TAG_INSTANCE_ID;
import static bubble.model.cloud.BubbleNode.TAG_SSH_KEY_ID;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.http.HttpHeaders.CONTENT_ENCODING;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
import static org.cobbzilla.util.http.HttpMethods.POST;
import static org.cobbzilla.util.http.HttpStatusCodes.*;
import static org.cobbzilla.util.http.HttpUtil.getResponse;
import static org.cobbzilla.util.json.JsonUtil.json;
import static org.cobbzilla.util.string.StringUtil.urlEncode;
import static org.cobbzilla.util.system.Sleep.sleep;
import static org.cobbzilla.wizard.resources.ResourceUtil.invalidEx;
import static org.cobbzilla.wizard.resources.ResourceUtil.notFoundEx;
@@ -109,6 +107,8 @@ public class VultrDriver extends ComputeServiceDriverBase {
super.postSetup();
}
public static final long PACKER_TIMEOUT = MINUTES.toMillis(60);
@Override public BubbleNode start(BubbleNode node) throws Exception {
final CloudRegion region = config.getRegion(node.getRegion());
@@ -120,18 +120,26 @@ public class VultrDriver extends ComputeServiceDriverBase {
final Long planId = getSize(size.getType()).getId();
if (planId == null) return die("start: plan not found: "+size.getInternalName());
final PackerImage packerImage = getPackerImage(node.getInstallType());
if (packerImage == null) return die("start: no packer image found for install type: "+node.getInstallType());
// register ssh key, check response
final String sshKeyId = registerSshKey(node);
PackerImage packerImage = getPackerImage(node.getInstallType());
if (packerImage == null) {
final AtomicReference<List<PackerImage>> imagesRef = new AtomicReference<>();
packerService.writePackerImages(cloud, node.getInstallType(), imagesRef);
long start = now();
while (imagesRef.get() == null && now() - start < PACKER_TIMEOUT) {
sleep(SECONDS.toMillis(1), "start: waiting for packer image creation");
}
if (imagesRef.get() == null) return die("start: timeout creating packer image");
packerImage = getPackerImage(node.getInstallType());
if (packerImage == null) {
return die("start: error creating packer image");
}
}
// prepare to create server
final String data = "DCID=" + regionId +
"&VPSPLANID=" + planId +
"&OSID=" + getSnapshotOsId() +
"&SNAPSHOTID=" + packerImage.getId() +
"&SSHKEYID=" + sshKeyId +
"&tag=" + cloud.getUuid() +
"&label=" + node.getFqdn() +
"&enable_ipv6=yes";
@@ -161,8 +169,12 @@
if (serverNode.has("tag")
&& serverNode.get("tag").textValue().equals(cloud.getUuid())
&& serverNode.has("status")
&& serverNode.has("server_state")
&& serverNode.has(VULTR_V4_IP)) {
final String serverState = serverNode.get("server_state").textValue();
if (serverState.equals("none") || serverState.equals("locked")) continue;
final String status = serverNode.get("status").textValue();
final String ip4 = serverNode.get(VULTR_V4_IP).textValue();
if (ip4 != null && ip4.length() > 0 && !ip4.equals("0.0.0.0")) {
@@ -191,17 +203,7 @@
return node;
}
@Override public HttpRequestBean registerSshKeyRequest(BubbleNode node) {
final String keyData = "name="+urlEncode(node.getUuid())+"&ssh_key="+urlEncode(node.getSshKey().getSshPublicKey());
return auth(new HttpRequestBean(POST, CREATE_SSH_KEY_URL, keyData).setHeader(CONTENT_ENCODING, "application/x-www-form-urlencoded"));
}
@Override protected String readSshKeyId(HttpResponseBean keyResponse) {
return json(keyResponse.getEntityString(), JsonNode.class).get("SSHKEYID").textValue();
}
@Override public BubbleNode cleanupStart(BubbleNode node) throws Exception {
deleteVultrKey(node);
return node;
}
@@ -210,9 +212,6 @@
}
@Override public BubbleNode stop(BubbleNode node) throws Exception {
deleteVultrKey(node); // just in case
Exception lastEx = null;
final long start = now();
while (now() - start < SERVER_STOP_TIMEOUT) {
@@ -383,22 +382,8 @@
}
}
public void deleteVultrKey(BubbleNode node) throws IOException {
if (node.hasTag(TAG_SSH_KEY_ID)) {
final String keyId = node.getTag(TAG_SSH_KEY_ID);
final String keyData = "SSHKEYID="+ keyId;
final HttpRequestBean destroyKeyRequest = auth(new HttpRequestBean(POST, DESTROY_SSH_KEY_URL, keyData));
// destroy key, check response
final HttpResponseBean destroyKeyResponse = destroyKeyRequest.curl();
if (destroyKeyResponse.getStatus() != OK) {
log.warn("cleanupStart: error destroying sshkey: "+ keyId);
}
}
}
@Override public List<PackerImage> getPackerImages() {
final List<PackerImage> images = loadCloudResources(SNAPSHOT_URL, new VultrPackerImageParser(configuration.getVersion(), configuration.getJarSha()));
final List<PackerImage> images = loadCloudResources(SNAPSHOT_URL, new VultrPackerImageParser(configuration.getVersion(), packerService.getPackerPublicKeyHash(), configuration.getJarSha()));
return images == null ? Collections.emptyList() : images;
}
@@ -457,13 +442,14 @@
return false;
}
if (servers.size() != 1) {
log.error("finalizeIncompletePackerRun: expected only one server, found: "+servers.size());
return false;
log.warn("finalizeIncompletePackerRun: expected only one server, found: "+servers.size());
}
// now shut down the server
// now shut down the server(s)
try {
stopServer(servers.get(0).getTag(TAG_INSTANCE_ID));
for (BubbleNode node : servers) {
stopServer(node.getTag(TAG_INSTANCE_ID));
}
} catch (Exception e) {
log.error("finalizeIncompletePackerRun: error stopping server: "+shortError(e));
return false;
@@ -8,8 +8,8 @@ import static org.cobbzilla.util.daemon.ZillaRuntime.die;
public class VultrPackerImageParser extends PackerImageParserBase {
public VultrPackerImageParser(String bubbleVersion, String jarSha) {
super(bubbleVersion, jarSha);
public VultrPackerImageParser(String bubbleVersion, String keyHash, String jarSha) {
super(bubbleVersion, keyHash, jarSha);
}
@Override public PackerImage parse(JsonNode item) {
@@ -20,7 +20,6 @@ import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.experimental.Accessors;
import org.apache.commons.lang3.RandomUtils;
import org.cobbzilla.util.security.RsaKeyPair;
import org.cobbzilla.wizard.client.ApiClientBase;
import org.cobbzilla.wizard.model.Identifiable;
import org.cobbzilla.wizard.model.IdentifiableBase;
@@ -53,11 +52,10 @@ import static org.cobbzilla.wizard.model.entityconfig.annotations.ECForeignKeySe
public class BubbleNode extends IdentifiableBase implements HasNetwork, HasBubbleTags<BubbleNode> {
public static final String TAG_INSTANCE_ID = "instance_id";
public static final String TAG_SSH_KEY_ID = "ssh_key_id";
public static final String TAG_ERROR = "X-Bubble-Error";
public static final String TAG_TEST = "test_instance";
private static final List<String> TAG_NAMES = Arrays.asList(TAG_INSTANCE_ID, TAG_SSH_KEY_ID, TAG_ERROR);
private static final List<String> TAG_NAMES = Arrays.asList(TAG_INSTANCE_ID, TAG_ERROR);
@Override public Collection<String> validTags() { return TAG_NAMES; }
@@ -219,9 +217,6 @@ public class BubbleNode extends IdentifiableBase implements HasNetwork, HasBubbl
ephemeralTags.put(name, value);
}
@Transient @JsonIgnore @Getter @Setter private transient RsaKeyPair sshKey;
public boolean hasSshKey () { return sshKey != null; }
@Transient @Getter @Setter private transient BubblePlan plan;
public boolean hasPlan () { return plan != null; }
@@ -1,8 +1,8 @@
package bubble.resources.cloud;
import bubble.cloud.compute.ComputeServiceDriver;
import bubble.cloud.compute.PackerImage;
import bubble.model.account.Account;
import bubble.model.cloud.AnsibleInstallType;
import bubble.model.cloud.CloudService;
import bubble.server.BubbleConfiguration;
import bubble.service.packer.PackerService;
@@ -16,8 +16,6 @@ import javax.ws.rs.PUT;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import java.util.Collections;
import java.util.List;
import static javax.ws.rs.core.MediaType.APPLICATION_JSON;
import static org.cobbzilla.wizard.resources.ResourceUtil.ok;
@@ -47,9 +45,9 @@ public class ComputePackerResource {
@PUT
public Response writeImages(@Context Request req,
@Context ContainerRequest ctx) {
final List<PackerImage> images = packer.writePackerImages(account, cloud);
if (images != null) return ok(images);
return ok(Collections.emptyList());
packer.writePackerImages(cloud, AnsibleInstallType.sage, null);
packer.writePackerImages(cloud, AnsibleInstallType.node, null);
return ok();
}
}
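The PUT handler now just kicks off builds for both install types and returns immediately; a caller that needs the resulting images passes an AtomicReference and polls it, as VultrDriver.start does elsewhere in this change. A minimal caller-side sketch (fragment; packerService and cloud are assumed in scope, and the 60-minute timeout mirrors PACKER_TIMEOUT above):

    final AtomicReference<List<PackerImage>> ref = new AtomicReference<>();
    packerService.writePackerImages(cloud, AnsibleInstallType.node, ref);
    final long start = now();
    while (ref.get() == null && now() - start < MINUTES.toMillis(60)) {
        sleep(SECONDS.toMillis(1), "waiting for packer image creation");
    }
    if (ref.get() == null) die("timeout waiting for packer image");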
@@ -61,7 +61,7 @@ public class BubbleServer extends RestServerBase<BubbleConfiguration> {
public static final String[] DEFAULT_ENV_FILE_PATHS = {
HOME_DIR + ".bubble.env",
HOME_DIR + "/current/bubble.env",
HOME_DIR + "/api/bubble.env",
System.getProperty("user.dir") + "/bubble.env"
};
@@ -37,6 +37,7 @@ import java.util.List;
import java.util.Map;
import static bubble.service.backup.RestoreService.RESTORE_MONITOR_SCRIPT_TIMEOUT_SECONDS;
import static org.apache.commons.io.FileUtils.copyDirectory;
import static org.cobbzilla.util.daemon.ZillaRuntime.die;
import static org.cobbzilla.util.io.FileUtil.abs;
import static org.cobbzilla.util.io.FileUtil.mkdirOrDie;
@@ -119,9 +120,7 @@ public class AnsiblePrepService {
for (String roleName : installRoles) {
final TempDir roleTemp = copyClasspathDirectory("ansible/roles/"+roleName);
final File roleDir = new File(rolesDir, roleName);
if (!roleTemp.renameTo(roleDir)) {
return die("prepAnsible: error renaming role dir "+abs(roleTemp)+" -> "+abs(roleDir));
}
copyDirectory(roleTemp, roleDir);
final File bubbleRoleJson = new File(abs(roleDir)+"/files/bubble_role.json");
if (bubbleRoleJson.exists()) {
final File varsDir = mkdirOrDie(new File(abs(roleDir)+"/vars"));
@@ -131,7 +130,7 @@
for (NameAndValue cfg : role.getConfig()) {
final String cfgName = cfg.getName();
final String rawVal = cfg.getValue();
final String value = HandlebarsUtil.apply(handlebars, rawVal, ctx);
final String value = HandlebarsUtil.apply(handlebars, rawVal, ctx, '[', ']');
if (value == null || value.trim().length() == 0) {
if (!role.hasOptionalConfigNames() || !role.isOptionalConfigName(cfgName)) {
errors.addViolation("err.role.config." + cfgName + ".required", "value for " + cfgName + " evaluated to empty string");
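Role config values are now rendered with '[' and ']' as the handlebars delimiter characters, presumably so that template expressions can be written with square brackets and any literal {{ }} sequences (e.g. Ansible/Jinja syntax) in the raw value are not consumed by handlebars. An illustrative fragment, assuming the overload simply swaps in the alternate delimiters:

    // hypothetical: a Handlebars instance and a context map are assumed in scope
    final Map<String, Object> ctx = new HashMap<>();
    ctx.put("role_name", "bubble");
    final String out = HandlebarsUtil.apply(handlebars, "name=[[role_name]]", ctx, '[', ']');
    // out == "name=bubble" if the delimiter overload behaves as assumed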
@@ -46,8 +46,7 @@ public class NodeProgressMeterConstants {
public static final String METER_ERROR_PLAN_NOT_ENABLED = "BUBBLE-ERROR: PLAN NOT ENABLED";
public static final String METER_ERROR_PEER_LIMIT_REACHED = "BUBBLE-ERROR: PEER LIMIT REACHED";
public static final String METER_ERROR_NODE_CLOUD_NOT_FOUND = "BUBBLE-ERROR: NODE CLOUD NOT FOUND";
public static final String METER_ERROR_BUBBLE_JAR_NOT_FOUND = "BUBBLE-ERROR: BUBBLE JAR NOT FOUND";
public static final String METER_ERROR_NO_IP_OR_SSH_KEY = "BUBBLE-ERROR: NODE STARTED BUT HAS NO IP ADDRESS OR SSH KEY";
public static final String METER_ERROR_NO_IP = "BUBBLE-ERROR: NODE STARTED BUT HAS NO IP ADDRESS";
public static final String METER_ERROR_ROLE_VALIDATION_ERRORS = "BUBBLE-ERROR: ROLE VALIDATION FAILED";
public static final String METER_COMPLETED = "meter_completed";
@@ -4,7 +4,6 @@
*/
package bubble.service.cloud;
import bubble.ApiConstants;
import bubble.cloud.CloudAndRegion;
import bubble.cloud.compute.ComputeServiceDriver;
import bubble.cloud.dns.DnsServiceDriver;
@@ -32,6 +31,7 @@ import bubble.server.BubbleConfiguration;
import bubble.service.backup.RestoreService;
import bubble.service.bill.PromotionService;
import bubble.service.notify.NotificationService;
import bubble.service.packer.PackerService;
import com.github.jknack.handlebars.Handlebars;
import lombok.Cleanup;
import lombok.Getter;
@@ -85,10 +85,10 @@ import static org.cobbzilla.wizard.resources.ResourceUtil.notFoundEx;
public class StandardNetworkService implements NetworkService {
public static final String PLAYBOOK_YML = "playbook.yml";
public static final String PLAYBOOK_TEMPLATE = stream2string(ApiConstants.ANSIBLE_DIR + "/" + PLAYBOOK_YML + ".hbs");
public static final String PLAYBOOK_TEMPLATE = stream2string(ANSIBLE_DIR + "/" + PLAYBOOK_YML + ".hbs");
public static final String INSTALL_LOCAL_SH = "install_local.sh";
public static final String INSTALL_LOCAL_TEMPLATE = stream2string(ApiConstants.ANSIBLE_DIR + "/" + INSTALL_LOCAL_SH + ".hbs");
public static final String INSTALL_LOCAL_TEMPLATE = stream2string(ANSIBLE_DIR + "/" + INSTALL_LOCAL_SH + ".hbs");
public static final int MAX_ANSIBLE_TRIES = 1;
public static final int RESTORE_KEY_LEN = 6;
@@ -116,6 +116,7 @@ public class StandardNetworkService implements NetworkService {
@Autowired private NodeService nodeService;
@Autowired private GeoService geoService;
@Autowired private AnsiblePrepService ansiblePrep;
@Autowired private PackerService packerService;
@Autowired private RestoreService restoreService;
@Autowired private RedisService redisService;
@@ -208,12 +209,6 @@ public class StandardNetworkService implements NetworkService {
sageNode = thisNode;
}
final File bubbleJar = configuration.getBubbleJar();
if (!bubbleJar.exists()) {
progressMeter.error(METER_ERROR_BUBBLE_JAR_NOT_FOUND);
return die("newNode: bubble.jar not found");
}
@Cleanup("delete") final TempDir automation = new TempDir();
final File bubbleFilesDir = mkdirOrDie(new File(abs(automation) + "/roles/bubble/files"));
@@ -232,9 +227,9 @@
nodeDAO.update(node);
// Sanity check that it came up OK
if (!node.hasIp4() || !node.hasSshKey()) {
progressMeter.error(METER_ERROR_NO_IP_OR_SSH_KEY);
final String message = "newNode: node booted but has no IP or SSH key";
if (!node.hasIp4()) {
progressMeter.error(METER_ERROR_NO_IP);
final String message = "newNode: node booted but has no IP";
killNode(node, message);
return die(message);
}
@@ -265,13 +260,9 @@
// write inventory file
final File inventory = new File(automation, "hosts");
final File sshKeyFile = secureFile(automation, ".ssh_key", node.getSshKey().getSshPrivateKey());
final File sshKeyFile = packerService.getPackerPrivateKey();
toFile(inventory, "[bubble]\n127.0.0.1"
+ " ansible_python_interpreter=/usr/bin/python3"
+ " ansible_ssh_private_key_file=" +abs(sshKeyFile)+"\n");
// write jar file
copyFile(bubbleJar, new File(bubbleFilesDir, "bubble.jar"));
+ " ansible_python_interpreter=/usr/bin/python3\n");
// write SSH key, if present
if (network.hasSshKey()) {
@@ -331,6 +322,10 @@
final CommandResult result = ansibleSetup(script, progressMeter);
// .... wait for ansible ...
if (!result.isZeroExitStatus()) {
if (result.getStderr().contains("Connection timed out")) {
log.warn("newNode: SSH connection timeout");
continue;
}
return die("newNode: error in setup:\nstdout=" + result.getStdout() + "\nstderr=" + result.getStderr());
}
setupOk = true;
@@ -417,11 +412,11 @@
// rsync ansible dir to remote host
"echo '" + METER_TICK_COPYING_ANSIBLE + "' && " +
"rsync -az -e \"ssh " + sshArgs + "\" . "+sshTarget+ ":" + ApiConstants.ANSIBLE_DIR + " && " +
"rsync -az -e \"ssh " + sshArgs + "\" . "+sshTarget+ ":" + ANSIBLE_DIR + " && " +
// run install_local.sh on remote host, installs ansible locally
"echo '" + METER_TICK_RUNNING_ANSIBLE + "' && " +
"ssh "+sshArgs+" "+sshTarget+" ~"+nodeUser+ "/" + ApiConstants.ANSIBLE_DIR + "/" + INSTALL_LOCAL_SH;
"ssh "+sshArgs+" "+sshTarget+" ~"+nodeUser+ "/" + ANSIBLE_DIR + "/" + INSTALL_LOCAL_SH;
}
private File writeFile(File dir, Map<String, Object> ctx, String filename, String templateOrData) throws IOException {
@@ -7,6 +7,7 @@ import bubble.cloud.compute.ComputeServiceDriver;
import bubble.cloud.compute.PackerConfig;
import bubble.cloud.compute.PackerImage;
import bubble.cloud.geoLocation.GeoLocation;
import bubble.dao.account.AccountDAO;
import bubble.model.account.Account;
import bubble.model.cloud.AnsibleInstallType;
import bubble.model.cloud.CloudService;
@@ -29,6 +30,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.File;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static bubble.ApiConstants.copyScripts;
@@ -51,8 +53,9 @@ public class PackerJob implements Callable<List<PackerImage>> {
public static final String PACKER_IMAGE_NAME_VAR = "packerImageName";
public static final String PACKER_IMAGE_PREFIX = "packer_bubble_";
public static final String BUBBLE_VERSION_VAR = "@@BUBBLE_VERSION@@";
public static final String PACKER_KEY_VAR = "@@PACKER_KEY_HASH@@";
public static final String JAR_SHA_VAR = "@@JAR_SHA256@@";
public static final String PACKER_IMAGE_NAME_TEMPLATE = PACKER_IMAGE_PREFIX+INSTALL_TYPE_VAR+"_"+BUBBLE_VERSION_VAR+"_"+JAR_SHA_VAR;
public static final String PACKER_IMAGE_NAME_TEMPLATE = PACKER_IMAGE_PREFIX+INSTALL_TYPE_VAR+"_"+PACKER_KEY_VAR+"_"+BUBBLE_VERSION_VAR+"_"+JAR_SHA_VAR;
public static final String VARIABLES_VAR = "packerVariables";
public static final String BUILD_REGION_VAR = "buildRegion";
public static final String IMAGE_REGIONS_VAR = "imageRegions";
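The image name now embeds the packer key hash between the install type and version segments, so images baked with a different packer SSH key are not reused. A sketch of the expansion (fragment; the hash and sha values are invented, and PACKER_KEY_VAR is filled from packerService.getPackerPublicKeyHash() as shown further down):

    final String imageName = PACKER_IMAGE_NAME_TEMPLATE
            .replace(INSTALL_TYPE_VAR, "node")
            .replace(PACKER_KEY_VAR, "3f9c2ab1")       // invented key hash
            .replace(BUBBLE_VERSION_VAR, "0.10.0")
            .replace(JAR_SHA_VAR, "77aa01de");         // invented jar sha
    // -> "packer_bubble_node_3f9c2ab1_0.10.0_77aa01de"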
@@ -61,19 +64,23 @@ public class PackerJob implements Callable<List<PackerImage>> {
public static final String PACKER_PLAYBOOK = "packer-playbook.yml";
@Autowired private BubbleConfiguration configuration;
@Autowired private AccountDAO accountDAO;
@Autowired private GeoService geoService;
@Autowired private PackerService packerService;
@Getter private Account account;
@Getter private CloudService cloud;
@Getter private AnsibleInstallType installType;
@Getter private AtomicReference<List<PackerImage>> imagesRef;
@Getter private List<PackerImage> images = new ArrayList<>();
private CloudRegionRelative buildRegion;
public PackerJob(Account account, CloudService cloud) {
this.account = account;
public PackerJob(CloudService cloud, AnsibleInstallType installType, AtomicReference<List<PackerImage>> imagesRef) {
this.cloud = cloud;
this.installType = installType;
this.imagesRef = imagesRef;
}
public String cacheKey() { return PackerService.cacheKey(cloud, installType); }
@Override public List<PackerImage> call() throws Exception {
try {
final List<PackerImage> images = _call();
@@ -100,11 +107,14 @@
ctx.put("sizes", computeDriver.getSizesMap());
ctx.put("os", computeDriver.getOs());
// Find admin account
final Account account = accountDAO.getFirstAdmin();
// determine lat/lon to find closest cloud region to perform build in
final GeoLocation here = geoService.locate(account.getUuid(), getExternalIp());
final List<CloudRegionRelative> closestRegions = findClosestRegions(new SingletonList<>(cloud), null, here.getLatitude(), here.getLongitude());
if (empty(closestRegions)) return die("writePackerImages: no closest region could be determined");
buildRegion = closestRegions.get(0);
CloudRegionRelative buildRegion = closestRegions.get(0);
ctx.put(BUILD_REGION_VAR, buildRegion);
// set environment variables
@@ -117,114 +127,121 @@
// copy ansible and other packer files to temp dir
@Cleanup final TempDir tempDir = copyClasspathDirectory("packer");
// copy packer ssh key
copyFile(packerService.getPackerPublicKey(), new File(abs(tempDir)+"/roles/common/files/"+PACKER_KEY_NAME));
// copy bubble jar and scripts to role dir, calculate shasum for packer image name
final File jar = configuration.getBubbleJar();
final File bubbleFilesDir = new File(abs(tempDir)+"/roles/bubble/files");
FileUtil.copyFile(jar, new File(abs(bubbleFilesDir)+"/bubble.jar"));
copyFile(jar, new File(abs(bubbleFilesDir)+"/bubble.jar"));
copyScripts(bubbleFilesDir);
// write packer config file
for (AnsibleInstallType installType : PACKER_TYPES) {
// check to see if we have packer images for all regions
final List<PackerImage> existingImages = computeDriver.getPackerImages();
if (!empty(existingImages)) {
final List<PackerImage> existingForInstallType = existingImages.stream()
.filter(i -> i.getName().startsWith(PACKER_IMAGE_PREFIX+installType.name()))
.collect(Collectors.toList());
if (!empty(existingForInstallType)) {
if (existingForInstallType.size() == 1 && existingForInstallType.get(0).getRegions() == null) {
// this image is for all regions
// check to see if we have packer images for all regions
final List<PackerImage> existingImages = computeDriver.getPackerImages();
if (!empty(existingImages)) {
final List<PackerImage> existingForInstallType = existingImages.stream()
.filter(i -> i.getName().startsWith(PACKER_IMAGE_PREFIX+installType.name()))
.collect(Collectors.toList());
if (!empty(existingForInstallType)) {
if (existingForInstallType.size() == 1 && existingForInstallType.get(0).getRegions() == null) {
// this image is for all regions
log.info("packer image already exists for "+installType+" for all regions");
images = existingForInstallType;
if (imagesRef != null) imagesRef.set(images);
return images;
} else {
final List<CloudRegion> existingRegions = new ArrayList<>();
for (PackerImage image : existingForInstallType) {
existingRegions.addAll(Arrays.asList(image.getRegions()));
}
log.info("packer images already exist for "+installType+" for regions: "+existingRegions.stream().map(CloudRegion::getInternalName).collect(Collectors.joining(", ")));
final List<String> existingRegionNames = existingRegions.stream().map(CloudRegion::getInternalName).collect(Collectors.toList());
// only create packer images for regions that are missing
final List<String> imagesToCreate = computeDriver.getRegions().stream()
.filter(r -> !existingRegionNames.contains(r.getInternalName()))
.map(CloudRegion::getInternalName)
.collect(Collectors.toList());
if (empty(imagesToCreate)) {
log.info("packer image already exists for "+installType+" for all regions");
continue;
} else {
final List<CloudRegion> existingRegions = new ArrayList<>();
for (PackerImage image : existingForInstallType) {
existingRegions.addAll(Arrays.asList(image.getRegions()));
}
log.info("packer images already exist for "+installType+" for regions: "+existingRegions.stream().map(CloudRegion::getInternalName).collect(Collectors.joining(", ")));
final List<String> existingRegionNames = existingRegions.stream().map(CloudRegion::getInternalName).collect(Collectors.toList());
// only create packer images for regions that are missing
final List<String> imagesToCreate = computeDriver.getRegions().stream()
.filter(r -> !existingRegionNames.contains(r.getInternalName()))
.map(CloudRegion::getInternalName)
.collect(Collectors.toList());
if (empty(imagesToCreate)) {
log.info("packer image already exists for "+installType+" for all regions");
continue;
}
ctx.put(IMAGE_REGIONS_VAR, toInnerStringList(imagesToCreate));
images = existingForInstallType;
if (imagesRef != null) imagesRef.set(images);
return images;
}
} else {
// create list of all regions, without leading/trailing double-quote, which should already be in the template
addAllRegions(computeDriver, ctx);
ctx.put(IMAGE_REGIONS_VAR, toInnerStringList(imagesToCreate));
}
} else {
// create list of all regions, without leading/trailing double-quote, which should already be in the template
addAllRegions(computeDriver, ctx);
}
} else {
// create list of all regions, without leading/trailing double-quote, which should already be in the template
addAllRegions(computeDriver, ctx);
}
final String imageName = PACKER_IMAGE_NAME_TEMPLATE
.replace(INSTALL_TYPE_VAR, installType.name())
.replace(BUBBLE_VERSION_VAR, configuration.getVersion())
.replace(JAR_SHA_VAR, jarSha);
ctx.put(PACKER_IMAGE_NAME_VAR, imageName);
final String packerConfigTemplate = stream2string(PACKER_TEMPLATE);
ctx.put("installType", installType.name());
ctx.put("roles", getRolesForInstallType(installType));
final List<String> builderJsons = new ArrayList<>();
if (packerConfig.iterateRegions()) {
for (CloudRegion region : computeDriver.getRegions()) {
ctx.put("region", region);
builderJsons.add(generateBuilder(packerConfig, ctx));
}
} else {
final String imageName = PACKER_IMAGE_NAME_TEMPLATE
.replace(INSTALL_TYPE_VAR, installType.name())
.replace(BUBBLE_VERSION_VAR, configuration.getVersion())
.replace(PACKER_KEY_VAR, packerService.getPackerPublicKeyHash())
.replace(JAR_SHA_VAR, jarSha);
ctx.put(PACKER_IMAGE_NAME_VAR, imageName);
final String packerConfigTemplate = stream2string(PACKER_TEMPLATE);
ctx.put("installType", installType.name());
ctx.put("roles", getRolesForInstallType(installType));
final List<String> builderJsons = new ArrayList<>();
if (packerConfig.iterateRegions()) {
for (CloudRegion region : computeDriver.getRegions()) {
ctx.put("region", region);
builderJsons.add(generateBuilder(packerConfig, ctx));
}
ctx.put(BUILDERS_VAR, builderJsons);
// write playbook file
final String playbookTemplate = FileUtil.toString(abs(tempDir)+ "/" + PACKER_PLAYBOOK_TEMPLATE);
FileUtil.toFile(new File(tempDir, PACKER_PLAYBOOK), HandlebarsUtil.apply(configuration.getHandlebars(), playbookTemplate, ctx, '[', ']'));
// write packer file
final String packerJson = HandlebarsUtil.apply(configuration.getHandlebars(), packerConfigTemplate, ctx, '[', ']');
toFileOrDie(abs(tempDir) + "/packer.json", packerJson);
// run packer, return handle to running packer
log.info("running packer for " + installType + "...");
final CommandResult commandResult = CommandShell.exec(new Command(new CommandLine("packer")
.addArgument("build")
.addArgument("-parallel-builds=2")
.addArgument("-color=false")
.addArgument("packer.json"))
.setDir(tempDir)
.setEnv(env)
.setCopyToStandard(true));
if (commandResult.isZeroExitStatus()) {
// read manifest, populate images
final File packerManifestFile = new File(tempDir, "manifest.json");
if (!packerManifestFile.exists()) {
return die("Error executing packer: manifest file not found: " + abs(packerManifestFile));
}
final PackerManifest packerManifest = json(FileUtil.toString(packerManifestFile), PackerManifest.class);
final PackerBuild[] builds = packerManifest.getBuilds();
if (empty(builds)) {
return die("Error executing packer: no builds found");
}
images.addAll(Arrays.stream(builds).map(b -> b.toPackerImage(imageName)).collect(Collectors.toList()));
} else {
builderJsons.add(generateBuilder(packerConfig, ctx));
}
ctx.put(BUILDERS_VAR, builderJsons);
// write playbook file
final String playbookTemplate = FileUtil.toString(abs(tempDir)+ "/" + PACKER_PLAYBOOK_TEMPLATE);
FileUtil.toFile(new File(tempDir, PACKER_PLAYBOOK), HandlebarsUtil.apply(configuration.getHandlebars(), playbookTemplate, ctx, '[', ']'));
// write packer file
final String packerJson = HandlebarsUtil.apply(configuration.getHandlebars(), packerConfigTemplate, ctx, '[', ']');
toFileOrDie(abs(tempDir) + "/packer.json", packerJson);
// run packer, return handle to running packer
log.info("running packer for " + installType + "...");
final CommandResult commandResult = CommandShell.exec(new Command(new CommandLine("packer")
.addArgument("build")
.addArgument("-parallel-builds=2")
.addArgument("-color=false")
.addArgument("packer.json"))
.setDir(tempDir)
.setEnv(env)
.setCopyToStandard(true));
if (commandResult.isZeroExitStatus()) {
// read manifest, populate images
final File packerManifestFile = new File(tempDir, "manifest.json");
if (!packerManifestFile.exists()) {
return die("Error executing packer: manifest file not found: " + abs(packerManifestFile));
}
final PackerManifest packerManifest = json(FileUtil.toString(packerManifestFile), PackerManifest.class);
final PackerBuild[] builds = packerManifest.getBuilds();
if (empty(builds)) {
return die("Error executing packer: no builds found");
}
images.addAll(Arrays.stream(builds).map(b -> b.toPackerImage(imageName)).collect(Collectors.toList()));
} else {
final List<PackerImage> finalizedImages = computeDriver.finalizeIncompletePackerRun(commandResult, installType, jarSha);
if (empty(finalizedImages)) {
return die("Error executing packer: exit status " + commandResult.getExitStatus());
}
images.addAll(finalizedImages);
} else {
final List<PackerImage> finalizedImages = computeDriver.finalizeIncompletePackerRun(commandResult, installType, jarSha);
if (empty(finalizedImages)) {
return die("Error executing packer: exit status " + commandResult.getExitStatus());
}
images.addAll(finalizedImages);
}
if (imagesRef != null) imagesRef.set(images);
return images;
}
@@ -1,25 +1,31 @@
package bubble.service.packer;
import bubble.cloud.compute.PackerImage;
import bubble.model.account.Account;
import bubble.model.cloud.AnsibleInstallType;
import bubble.model.cloud.CloudService;
import bubble.server.BubbleConfiguration;
import lombok.extern.slf4j.Slf4j;
import org.cobbzilla.util.daemon.DaemonThreadFactory;
import org.cobbzilla.util.security.ShaUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import static org.cobbzilla.util.daemon.ZillaRuntime.empty;
import static org.cobbzilla.util.daemon.ZillaRuntime.shortError;
import static org.cobbzilla.util.daemon.ZillaRuntime.*;
import static org.cobbzilla.util.io.FileUtil.abs;
import static org.cobbzilla.util.io.FileUtil.mkdirOrDie;
import static org.cobbzilla.util.io.StreamUtil.stream2string;
import static org.cobbzilla.util.string.StringUtil.splitAndTrim;
import static org.cobbzilla.util.system.CommandShell.chmod;
import static org.cobbzilla.util.system.CommandShell.execScript;
@Service @Slf4j
public class PackerService {
@@ -31,9 +37,7 @@ public class PackerService {
public static final List<String> NODE_ROLES = splitAndTrim(stream2string(PACKER_DIR + "/node-roles.txt"), "\n")
.stream().filter(s -> !empty(s)).collect(Collectors.toList());
// todo: change packer-sage.json.hbs to packer.json.hbs, insert required roles via handlebars
// todo: change packer-playbook.yml to packer-playbook.yml.hbs, insert required roles via handlebars
public static final String PACKER_KEY_NAME = "packer_rsa";
private final Map<String, Future<List<PackerImage>>> activeJobs = new ConcurrentHashMap<>(16);
private final Map<String, List<PackerImage>> completedJobs = new ConcurrentHashMap<>(16);
@@ -41,28 +45,53 @@ public class PackerService {
@Autowired private BubbleConfiguration configuration;
public List<PackerImage> writePackerImages(Account account, CloudService cloud) {
public List<PackerImage> writePackerImages(CloudService cloud,
AnsibleInstallType installType,
AtomicReference<List<PackerImage>> imagesRef) {
final String cacheKey = cacheKey(cloud, installType);
synchronized (activeJobs) {
final List<PackerImage> images = completedJobs.get(cloud.getUuid());
final List<PackerImage> images = completedJobs.get(cacheKey);
if (images != null) return images;
activeJobs.computeIfAbsent(cloud.getUuid(), k -> {
final PackerJob packerJob = configuration.autowire(new PackerJob(account, cloud));
activeJobs.computeIfAbsent(cacheKey, k -> {
final PackerJob packerJob = configuration.autowire(new PackerJob(cloud, installType, imagesRef));
return pool.submit(packerJob);
});
return null;
}
}
public static String cacheKey(CloudService cloud, AnsibleInstallType installType) {
return cloud.getUuid()+"_"+installType;
}
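Keying the job and result caches on cloud and install type (rather than cloud UUID alone) lets sage and node image builds for the same cloud proceed independently. For a cloud whose UUID is "c1" (invented for illustration):

    cacheKey(cloud, AnsibleInstallType.sage); // -> "c1_sage"
    cacheKey(cloud, AnsibleInstallType.node); // -> "c1_node"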
public void recordJobCompleted(PackerJob job) {
synchronized (activeJobs) {
activeJobs.remove(job.getCloud().getUuid());
completedJobs.put(job.getCloud().getUuid(), job.getImages());
activeJobs.remove(job.cacheKey());
completedJobs.put(job.cacheKey(), job.getImages());
}
}
public void recordJobError(PackerJob job, Exception e) {
log.error("recordJobError: "+shortError(e), e);
activeJobs.remove(job.getCloud().getUuid());
activeJobs.remove(job.cacheKey());
}
public File getPackerPublicKey () { return initPackerKey(true); }
public File getPackerPrivateKey () { return initPackerKey(false); }
public String getPackerPublicKeyHash () { return ShaUtil.sha256_file(getPackerPublicKey()); }
private synchronized File initPackerKey(boolean pub) {
final File keyDir = new File(System.getProperty("user.home"),".ssh");
if (!keyDir.exists()) mkdirOrDie(keyDir);
chmod(keyDir, "700");
final File pubKeyFile = new File(keyDir, PACKER_KEY_NAME+".pub");
final File privateKeyFile = new File(keyDir, PACKER_KEY_NAME);
if (!pubKeyFile.exists() || !privateKeyFile.exists()) {
final String comment = configuration.getVersion() + "_" + configuration.getJarSha();
execScript("ssh-keygen -t rsa -q -N '' -C '"+comment+"' -f "+abs(privateKeyFile));
if (!pubKeyFile.exists() || !privateKeyFile.exists()) return die("initPackerKey: error creating packer key");
}
return pub ? pubKeyFile : privateKeyFile;
}
}
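Both accessors funnel through initPackerKey, so the first caller generates ~/.ssh/packer_rsa and packer_rsa.pub once via ssh-keygen and later calls return the existing files. An illustrative fragment (assuming a wired-in PackerService):

    final File pub = packerService.getPackerPublicKey();         // created on first use
    final File priv = packerService.getPackerPrivateKey();       // same key pair, no regeneration
    final String hash = packerService.getPackerPublicKeyHash();  // sha256 of the .pub file, used in image names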
@@ -1 +1 @@
bubble.version=0.9.19
bubble.version=0.10.0
@@ -3,18 +3,6 @@
- name: Create new bubble node
  hosts: bubble
  remote_user: root
  gather_facts: no
  pre_tasks:
    - name: apt install python3 and python3-pip
      raw: sudo apt-get -y install python3 python3-pip virtualenv
    - name: pip install setuptools and psycopg2-binary
      raw: sudo pip3 install setuptools psycopg2-binary
    - name: gather facts
      setup:
  roles:
{{#each roles}} - {{this}}
{{/each}}
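Since the roles list is now injected via handlebars, one quick sanity check on the rendered result before packer consumes it could be a syntax-only pass; the file name packer-playbook.yml is an assumption based on the todo notes in PackerService above:

    # parses the playbook only; does not contact any hosts
    ansible-playbook --syntax-check packer-playbook.yml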
@@ -28,7 +28,7 @@ if [[ ! -f "${ALGO_BASE}/config.cfg.hbs" ]] ; then
fi

log "Regenerating algo config..."
java -cp /home/bubble/current/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"
java -cp /home/bubble/api/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"

log "Updating algo VPN users..."
cd ${ALGO_BASE} && \
@@ -14,7 +14,7 @@ if [[ ! -f "${ALGO_BASE}/config.cfg.hbs" ]] ; then
  die "No ${ALGO_BASE}/config.cfg.hbs found"
fi

java -cp /home/bubble/current/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"
java -cp /home/bubble/api/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"

python3 -m virtualenv --python="$(command -v python3)" .env \
  && source .env/bin/activate \
@@ -13,7 +13,6 @@
    mode: 0555
  with_items:
    - { src: "full_reset_db.sh.j2", dest: "full_reset_db.sh" }
    - { src: "snapshot_ansible.sh.j2", dest: "snapshot_ansible.sh" }

- name: Generate keys
  shell: random_password.sh /home/bubble/{{ item.file }} bubble {{ item.group }}
@@ -28,7 +27,7 @@
- name: Write bubble env file
  template:
    src: bubble.env.j2
    dest: /home/bubble/bubble_{{ bubble_version }}/bubble.env
    dest: /home/bubble/api/bubble.env
    owner: bubble
    group: bubble
    mode: 0400
@@ -45,20 +44,4 @@
    - sage_node.json
    - sage_key.json

- name: Initialize local storage with role archive
  shell: init_roles.sh

- import_tasks: postgresql_data.yml

- name: Install refresh_bubble_ssh_keys script
  template:
    src: refresh_bubble_ssh_keys.sh.j2
    dest: /usr/local/sbin/refresh_bubble_ssh_keys.sh
    owner: root
    group: root
    mode: 0500

- name: Install refresh_bubble_ssh_keys_monitor supervisor conf file
  copy:
    src: supervisor_refresh_bubble_ssh_keys_monitor.conf
    dest: /etc/supervisor/conf.d/refresh_bubble_ssh_keys_monitor.conf
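A quick spot-check after this role runs, using the ownership and mode declared in the env-file task above (the destination is now a fixed path rather than a version-dependent one):

    # expect output: bubble bubble 400
    stat -c '%U %G %a' /home/bubble/api/bubble.env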
@@ -5,6 +5,6 @@ stderr_logfile = /home/bubble/logs/bubble-err.log
command=sudo -u bubble bash -c "/usr/bin/java \
  -Dfile.encoding=UTF-8 -Djava.net.preferIPv4Stack=true \
  -XX:+UseG1GC -XX:MaxGCPauseMillis=400 {{ bubble_java_opts }} \
  -cp /home/bubble/current/bubble.jar \
  -cp /home/bubble/api/bubble.jar \
  bubble.server.BubbleServer \
  /home/bubble/current/bubble.env"
  /home/bubble/api/bubble.env"
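After the supervisor template is rewritten, the usual supervisorctl sequence picks up the change; the log path below comes from the stderr_logfile setting shown above:

    supervisorctl reread && supervisorctl update
    supervisorctl status bubble
    # confirm the server came up cleanly under the new jar path
    tail -n 20 /home/bubble/logs/bubble-err.log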
@@ -1,8 +1,10 @@
#
# Copyright (c) 2020 Bubble, Inc. All rights reserved. For personal (non-commercial) use, see license: https://getbubblenow.com/bubble-license/
#
- name: Init certbot
  shell: init_certbot.sh {{ letsencrypt_email }} {{ server_name }} {{ server_alias }}

- name: Ensure nginx is stopped
  service:
    name: nginx
    state: stopped

# see https://weakdh.org/sysadmin.html
- name: Create a strong dhparam.pem
@@ -16,6 +18,9 @@
- include: site.yml
- meta: flush_handlers  # nginx must be restarted right now if a restart is pending

- name: Init certbot
  shell: init_certbot.sh {{ letsencrypt_email }} {{ server_name }} {{ server_alias }}

- name: Ensure nginx is restarted
  service:
    name: nginx
@@ -3,20 +3,8 @@
- name: Create new bubble sage node
  hosts: bubble
  remote_user: root
  gather_facts: no
  vars:
    install_type: sage
  pre_tasks:
    - name: apt install python3 and python3-pip
      raw: sudo apt-get -y install python3 python3-pip virtualenv
    - name: pip install setuptools and psycopg2-binary
      raw: sudo pip3 install setuptools psycopg2-binary
    - name: gather facts
      setup:
    install_type: [[installType]]
  roles:
[[#each roles]] - [[this]]
[[/each]]
@@ -28,7 +28,7 @@ if [[ ! -f "${ALGO_BASE}/config.cfg.hbs" ]] ; then
fi

log "Regenerating algo config..."
java -cp /home/bubble/current/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"
java -cp /home/bubble/api/bubble.jar bubble.main.BubbleMain generate-algo-conf --algo-config ${ALGO_BASE}/config.cfg.hbs || die "Error writing algo config.cfg"

log "Updating algo VPN users..."
cd ${ALGO_BASE} && \
@@ -66,7 +66,7 @@ supervisorctl stop mitmdump

# restore bubble.jar
log "Restoring bubble.jar"
cp ${RESTORE_BASE}/bubble.jar ${BUBBLE_HOME}/current/bubble.jar
cp ${RESTORE_BASE}/bubble.jar ${BUBBLE_HOME}/api/bubble.jar

# set wasRestored flag in self_node.json
log "Adding wasRestored=true to ${SELF_NODE}"
@@ -44,7 +44,7 @@ INSTALL_MODE=${4:?no install mode provided}
DROP_AND_RECREATE=${5}

BUBBLE_HOME=/home/bubble
BUBBLE_JAR=/home/bubble/current/bubble.jar
BUBBLE_JAR=/home/bubble/api/bubble.jar
if [[ ! -f ${BUBBLE_JAR} ]] ; then
  die "Bubble jar not found: ${BUBBLE_JAR}"
fi
@@ -1,63 +0,0 @@
#!/bin/bash
#
# Copyright (c) 2020 Bubble, Inc. All rights reserved. For personal (non-commercial) use, see license: https://getbubblenow.com/bubble-license/
#
SCRIPT="${0}"
SCRIPT_DIR=$(cd $(dirname ${SCRIPT}) && pwd)
LOG=/tmp/$(basename ${0}).log

function die {
  echo 1>&2 "${1}"
  log "${1}"
  exit 1
}

function log {
  echo "${1}" | tee -a ${LOG}
}

if [[ $(whoami) != "bubble" ]] ; then
  if [[ $(whoami) == "root" ]] ; then
    sudo -H -u bubble ${0}
    exit $?
  fi
  die "${0} must be run as bubble"
fi

if [[ -z "${LOCALSTORAGE_BASE_DIR}" ]] ; then
  if [[ -f "${HOME}/bubble/current/bubble.env" ]] ; then
    LOCALSTORAGE_BASE_DIR=$(cat "${HOME}/bubble/current/bubble.env" | grep -v '^#' | grep LOCALSTORAGE_BASE_DIR | awk -F '=' '{print $2}' | tr -d ' ')
  fi
fi
if [[ -z "${LOCALSTORAGE_BASE_DIR}" ]] ; then
  log "LOCALSTORAGE_BASE_DIR env var not defined, using ${HOME}/.bubble_local_storage"
  LOCALSTORAGE_BASE_DIR="${HOME}/.bubble_local_storage"
fi

if [[ -z "${BUBBLE_JAR}" ]] ; then
  if [[ -f "${HOME}/current/bubble.jar" ]] ; then
    BUBBLE_JAR="${HOME}/current/bubble.jar"
  fi
fi
if [[ -z "${BUBBLE_JAR}" ]] ; then
  die "BUBBLE_JAR env var not set and no jar file found"
fi

ROLE_DIR="${HOME}/role_tgz"
if [[ ! -d "${ROLE_DIR}" ]] ; then
  die "role_tgz dir not found: ${ROLE_DIR}"
fi

NETWORK_UUID="$(cat ${HOME}/self_node.json | jq -r .network)"

find ${ROLE_DIR} -type f -name "*.tgz" | while read role_tgz ; do
  path="automation/roles/$(basename ${role_tgz})"
  dest="${LOCALSTORAGE_BASE_DIR}/${NETWORK_UUID}/${path}"
  if [[ ! -f ${dest} ]] ; then
    mkdir -p $(dirname ${dest}) || die "Error creating destination directory"
    cp ${role_tgz} ${dest} || die "Error copying role archive"
    log "installed role ${role_tgz} -> ${dest}"
  else
    log "role already installed ${role_tgz} -> ${dest}"
  fi
done
@@ -50,14 +50,9 @@ if [[ ${KEY_COUNT} -eq 0 ]] ; then
fi

# Retain self-generated ansible setup key
ANSIBLE_USER="{{admin_user}}"
if [[ ! -z "${ANSIBLE_USER}" ]] ; then
  PUB_FILE="$(cd ~{{admin_user}} && pwd)/.ssh/bubble_rsa.pub"
  if [[ -f "${PUB_FILE}" ]] ; then
    cat "${PUB_FILE}" >> ${NEW_KEYS}
  fi
else
  log "Warning: No ansible user defined, unable to retain setup key"
PUB_FILE="$(cd ~root && pwd)/.ssh/bubble_rsa.pub"
if [[ -f "${PUB_FILE}" ]] ; then
  cat "${PUB_FILE}" >> ${NEW_KEYS}
fi

mv ${NEW_KEYS} ${AUTH_KEYS} || die "Error moving ${NEW_KEYS} -> ${AUTH_KEYS}"
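One way to confirm the setup key actually survives a refresh; this sketch assumes root's authorized_keys lives at the conventional /root/.ssh/authorized_keys path:

    PUB_FILE="$(cd ~root && pwd)/.ssh/bubble_rsa.pub"
    # -x requires an exact whole-line match, -F treats the key as a fixed string
    if grep -qxF "$(cat "${PUB_FILE}")" /root/.ssh/authorized_keys ; then
      echo "setup key retained"
    else
      echo "setup key missing"
    fi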
@@ -17,7 +17,7 @@
    system: yes
    home: /home/bubble

- name: Creates bubble API version dir
- name: Creates bubble API dir
  file:
    path: /home/bubble/api
    owner: bubble
@@ -54,7 +54,6 @@
  with_items:
    - "bsql.sh"
    - "random_password.sh"
    - "init_roles.sh"

- name: Install standard bubble scripts
  copy:
@@ -100,3 +99,15 @@
    group: root
    mode: 0500

- name: Install refresh_bubble_ssh_keys script
  copy:
    src: refresh_bubble_ssh_keys.sh
    dest: /usr/local/sbin/refresh_bubble_ssh_keys.sh
    owner: root
    group: root
    mode: 0500

- name: Install refresh_bubble_ssh_keys_monitor supervisor conf file
  copy:
    src: supervisor_refresh_bubble_ssh_keys_monitor.conf
    dest: /etc/supervisor/conf.d/refresh_bubble_ssh_keys_monitor.conf
@@ -18,7 +18,7 @@
- name: Install common packages
  apt:
    name: [ 'ntp', 'unzip', 'safe-rm', 'supervisor', 'emacs-nox', 'screen', 'xtail', 'fail2ban' ]
    name: [ 'cloud-init', 'ntp', 'unzip', 'safe-rm', 'supervisor', 'emacs-nox', 'screen', 'xtail', 'fail2ban' ]
    state: present
    update_cache: yes
@@ -27,6 +27,14 @@
    src: dot-screenrc
    dest: /root/.screenrc

- name: Install packer key as only authorized key
  copy:
    src: packer_rsa
    dest: /root/.ssh/authorized_keys
    owner: root
    group: root
    mode: 0600

- name: Start common services
  service:
    name: '{{ item }}'
@@ -0,0 +1,29 @@
#!/bin/bash
SCRIPT="${0}"
SCRIPT_DIR=$(cd $(dirname ${SCRIPT}) && pwd)
LOG=/tmp/$(basename ${0}).log

function die {
  echo 1>&2 "${1}"
  log "${1}"
  exit 1
}

function log {
  echo "${1}" | tee -a ${LOG}
}

if [[ $(whoami) != "root" ]] ; then
  die "${0} must be run as root"
fi

ANSIBLE_USER_HOME=$(cd ~root && pwd)
ANSIBLE_SNAPSHOT="/home/bubble/ansible.tgz"

cd ${ANSIBLE_USER_HOME} \
  && tar czf ${ANSIBLE_SNAPSHOT} ./ansible \
  && chmod 400 ${ANSIBLE_SNAPSHOT} \
  && chown bubble ${ANSIBLE_SNAPSHOT} \
  || die "Error creating ansible snapshot"
@@ -14,9 +14,17 @@
    mode: 0550
  when: install_type == 'node'

- name: Install snapshot_ansible.sh
  copy:
    src: snapshot_ansible.sh
    dest: /usr/local/bin/snapshot_ansible.sh
    owner: root
    group: root
    mode: 0500

- name: Install bubble-nodemanager
  copy:
    src: "bubble-nodemanager"
    src: bubble-nodemanager
    dest: /usr/sbin/bubble-nodemanager
    owner: root
    group: root