PIPIONE
# (Answer context, translated from Chinese:) I managed this with the help of a
# blog I found online; the link has been lost, but the code is below.  If I
# find the blog again I will update the answer.  To deploy, you only need to
# zip the paramiko library into the Lambda deployment package.

import time

import boto3
import paramiko


def lambda_handler(event, context):
    """Start an EC2 instance, SSH into it, and run a list of shell commands.

    Intended as an AWS Lambda handler.  Downloads the instance's .pem key
    from a secure S3 bucket into /tmp, connects over SSH via the instance's
    private IP, executes each command in ``commands``, and returns the
    literal string 'Success'.
    """
    # SECURITY NOTE(review): hard-coded credentials are a bad practice —
    # prefer attaching an IAM execution role to the Lambda and dropping the
    # explicit keys entirely (boto3 then resolves credentials automatically).
    ec2 = boto3.resource(
        'ec2',
        region_name='us-east-1',
        aws_access_key_id='XXXXXXXXXXXXXXXXXXXX',
        aws_secret_access_key='XXXXXXXXXXXXXXXXXXXX',
    )
    instance_id = 'XXXXXXXXXXXXXXXX'
    instance = ec2.Instance(instance_id)

    # Start the instance and block until it is actually running.  The waiter
    # is more reliable than the original (commented-out) time.sleep(60).
    instance.start()
    instance.wait_until_running()
    instance.reload()  # refresh cached attributes such as private_ip_address

    # The .pem key for the instance is kept in a secure S3 bucket; download
    # it into Lambda's writable /tmp directory.
    s3_client = boto3.client(
        's3',
        aws_access_key_id='XXXXXXXXXXXXXXXXXXXX',
        aws_secret_access_key='XXXXXXXXXXXXXXXXXXXX',
    )
    bucket_name = ''
    key_name = ''
    key_location = ''  # e.g. '/tmp/key.pem'
    # download_file is synchronous, so the original time.sleep(10) after it
    # added nothing and has been removed.
    s3_client.download_file(bucket_name, key_name, key_location)

    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    privkey = paramiko.RSAKey.from_private_key_file(key_location)

    try:
        # Username is most likely 'ec2-user', 'root' or 'ubuntu' depending
        # on the AMI.  NOTE(review): connecting to the private IP assumes
        # this Lambda runs inside the same VPC as the instance — confirm.
        ssh.connect(
            instance.private_ip_address,
            22,
            username='ubuntu',
            pkey=privkey,
        )

        commands = []
        for command in commands:
            print("Executing {}".format(command))
            stdin, stdout, stderr = ssh.exec_command(command)
            for line in stdout.read().splitlines():
                print(line)
    finally:
        # Always release the SSH connection — the original leaked it
        # whenever connect() or a command raised.
        ssh.close()

    return 'Success'