A complete guide to optimizing cloud AI costs, with practical strategies, tools, and Norwegian case studies showing up to 67% cost reduction.
Right-size instances based on actual usage
Use spot instances for non-critical ML workloads
Scale automatically based on traffic and demand (see the target-tracking sketch at the end of this guide)
Apply intelligent data lifecycle management and tiering (see the S3 sketch right after this list)
Reserve capacity in advance (e.g. Reserved Instances) for stable workloads
Choose regions with both cost and latency in mind (see the price-comparison sketch at the end of this guide)
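For data lifecycle management and tiering (the fourth strategy above), the sketch below moves ageing training data to cheaper S3 storage classes. It is a minimal example under assumed names: the bucket 'ml-training-data', the 'datasets/' prefix, and the 30/90-day thresholds are placeholders to adapt to your own access patterns.

import boto3

s3 = boto3.client('s3', region_name='eu-north-1')

# Tier ageing data: Standard -> Standard-IA after 30 days, -> Glacier after 90 days.
s3.put_bucket_lifecycle_configuration(
    Bucket='ml-training-data',  # placeholder bucket name
    LifecycleConfiguration={
        'Rules': [{
            'ID': 'tier-old-training-data',
            'Filter': {'Prefix': 'datasets/'},  # placeholder prefix
            'Status': 'Enabled',
            'Transitions': [
                {'Days': 30, 'StorageClass': 'STANDARD_IA'},
                {'Days': 90, 'StorageClass': 'GLACIER'}
            ]
        }]
    }
)

Raw datasets are often written once and rarely read after training, so letting them age into Infrequent Access and Glacier is frequently the cheapest change to make first.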
import boto3
import pandas as pd
from datetime import datetime, timedelta

class AWSCostOptimizer:
    def __init__(self, region='eu-north-1'):
        self.ec2 = boto3.client('ec2', region_name=region)
        self.cloudwatch = boto3.client('cloudwatch', region_name=region)
        # Cost Explorer is a global API served from us-east-1
        self.ce = boto3.client('ce', region_name='us-east-1')

    def analyze_instance_utilization(self, instance_ids, days=30):
        """Analyze CPU utilization for EC2 instances over the given period."""
        end_time = datetime.utcnow()
        start_time = end_time - timedelta(days=days)
        utilization_data = []
        for instance_id in instance_ids:
            # Fetch hourly CPU utilization from CloudWatch
            cpu_response = self.cloudwatch.get_metric_statistics(
                Namespace='AWS/EC2',
                MetricName='CPUUtilization',
                Dimensions=[{'Name': 'InstanceId', 'Value': instance_id}],
                StartTime=start_time,
                EndTime=end_time,
                Period=3600,
                Statistics=['Average', 'Maximum']
            )
            if cpu_response['Datapoints']:
                avg_cpu = sum(d['Average'] for d in cpu_response['Datapoints']) / len(cpu_response['Datapoints'])
                max_cpu = max(d['Maximum'] for d in cpu_response['Datapoints'])
                # Fetch instance details
                instance_details = self.ec2.describe_instances(InstanceIds=[instance_id])
                instance_type = instance_details['Reservations'][0]['Instances'][0]['InstanceType']
                utilization_data.append({
                    'InstanceId': instance_id,
                    'InstanceType': instance_type,
                    'AvgCPU': avg_cpu,
                    'MaxCPU': max_cpu,
                    'Recommendation': self._get_recommendation(avg_cpu, max_cpu, instance_type)
                })
        return pd.DataFrame(utilization_data)

    def _get_recommendation(self, avg_cpu, max_cpu, current_type):
        """Simple rule-based recommendation for instance sizing."""
        if avg_cpu < 20 and max_cpu < 60:
            return "DOWNSIZE: Consider a smaller instance type (potential savings: 30-50%)"
        elif avg_cpu > 70 or max_cpu > 90:
            return "UPSIZE: Consider a larger instance type for better performance"
        else:
            return "OPTIMAL: Current size is appropriate"

    def get_spot_savings_opportunities(self):
        """Identify running workloads that could move to spot instances."""
        running_instances = self.ec2.describe_instances(
            Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]
        )
        spot_candidates = []
        for reservation in running_instances['Reservations']:
            for instance in reservation['Instances']:
                # Check tags for workload type and environment
                tags = {tag['Key']: tag['Value'] for tag in instance.get('Tags', [])}
                if self._is_spot_candidate(tags, instance):
                    spot_price = self._get_spot_price(instance['InstanceType'])
                    on_demand_price = self._get_on_demand_price(instance['InstanceType'])
                    potential_savings = ((on_demand_price - spot_price) / on_demand_price) * 100
                    spot_candidates.append({
                        'InstanceId': instance['InstanceId'],
                        'InstanceType': instance['InstanceType'],
                        'CurrentPrice': on_demand_price,
                        'SpotPrice': spot_price,
                        'PotentialSavings': f"{potential_savings:.1f}%",
                        'MonthlyNOKSavings': (on_demand_price - spot_price) * 24 * 30 * 11.2  # hours/month * assumed USD->NOK rate
                    })
        return spot_candidates

    def _is_spot_candidate(self, tags, instance):
        """Heuristics for identifying spot-friendly workloads."""
        # Batch processing, ML training, analytics and dev/test environments
        workload_type = tags.get('WorkloadType', '').lower()
        environment = tags.get('Environment', '').lower()
        spot_friendly_workloads = ['batch', 'ml-training', 'analytics', 'etl']
        spot_friendly_envs = ['dev', 'test', 'staging']
        return (
            any(workload in workload_type for workload in spot_friendly_workloads) or
            any(env in environment for env in spot_friendly_envs) or
            'interruptible' in tags.get('Attributes', '').lower()
        )
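    # Hedged helper sketches: _get_spot_price and _get_on_demand_price are assumed
    # implementations so the class runs end to end. The spot price is read from the
    # EC2 spot price history API; the on-demand prices are a small illustrative table
    # that should be replaced with lookups against the AWS Pricing API.
    def _get_spot_price(self, instance_type):
        """Most recent Linux/UNIX spot price (USD per hour) for the instance type."""
        history = self.ec2.describe_spot_price_history(
            InstanceTypes=[instance_type],
            ProductDescriptions=['Linux/UNIX'],
            MaxResults=1
        )
        if history['SpotPriceHistory']:
            return float(history['SpotPriceHistory'][0]['SpotPrice'])
        return 0.0

    def _get_on_demand_price(self, instance_type):
        """Approximate on-demand price (USD per hour); illustrative values only."""
        approximate_prices = {
            'm5.large': 0.096,   # illustrative rate, not an authoritative price
            'm5.xlarge': 0.192,  # illustrative rate
            'c5.xlarge': 0.17    # illustrative rate
        }
        return approximate_prices.get(instance_type, 0.10)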
    def calculate_reserved_instance_savings(self):
        """Estimate potential Reserved Instance (RI) savings."""
        # Fetch the last 90 days of cost history from Cost Explorer
        response = self.ce.get_cost_and_usage(
            TimePeriod={
                'Start': (datetime.now() - timedelta(days=90)).strftime('%Y-%m-%d'),
                'End': datetime.now().strftime('%Y-%m-%d')
            },
            Granularity='MONTHLY',
            Metrics=['BlendedCost'],
            GroupBy=[{'Type': 'DIMENSION', 'Key': 'SERVICE'}]
        )
        # Pull out the EC2 portion of each month's cost
        ec2_costs = []
        for result in response['ResultsByTime']:
            for group in result['Groups']:
                if 'Amazon Elastic Compute Cloud' in group['Keys'][0]:
                    monthly_cost = float(group['Metrics']['BlendedCost']['Amount'])
                    ec2_costs.append(monthly_cost)
        if ec2_costs:
            avg_monthly_ec2 = sum(ec2_costs) / len(ec2_costs)
            # Estimated RI savings (30-60% is typical)
            ri_1_year_savings = avg_monthly_ec2 * 12 * 0.35  # assume 35% savings
            ri_3_year_savings = avg_monthly_ec2 * 12 * 0.55  # assume 55% savings
            return {
                'CurrentAnnualEC2Cost': avg_monthly_ec2 * 12,
                'RI_1Year_Savings': ri_1_year_savings,
                'RI_3Year_Savings': ri_3_year_savings,
                'RI_1Year_NOK': ri_1_year_savings * 11.2,  # assumed USD->NOK rate
                'RI_3Year_NOK': ri_3_year_savings * 11.2
            }
        return None

# Example usage
if __name__ == "__main__":
    optimizer = AWSCostOptimizer()

    # Analyze instance utilization
    instance_ids = ['i-1234567890abcdef0', 'i-0987654321fedcba0']
    utilization_df = optimizer.analyze_instance_utilization(instance_ids)
    print("Instance Utilization Analysis:")
    print(utilization_df)

    # Find spot opportunities
    spot_opportunities = optimizer.get_spot_savings_opportunities()
    total_monthly_savings = sum(opp['MonthlyNOKSavings'] for opp in spot_opportunities)
    print(f"\nTotal monthly savings with spot instances: {total_monthly_savings:,.0f} NOK")

    # Calculate Reserved Instance savings
    ri_analysis = optimizer.calculate_reserved_instance_savings()
    if ri_analysis:
        print("\nReserved Instance savings:")
        print(f"1-year RI: {ri_analysis['RI_1Year_NOK']:,.0f} NOK per year")
        print(f"3-year RI: {ri_analysis['RI_3Year_NOK']:,.0f} NOK per year")
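Automatic scaling, the third strategy in the list above, complements rightsizing: instead of provisioning for peak load around the clock, let the fleet follow demand. A minimal sketch, assuming an existing EC2 Auto Scaling group (the group name below is a placeholder), that attaches a target-tracking policy keeping average CPU near 60%:

import boto3

autoscaling = boto3.client('autoscaling', region_name='eu-north-1')

# Target tracking: the group adds instances when average CPU rises above the target
# and removes them when it falls below, so idle night-time capacity is not billed.
autoscaling.put_scaling_policy(
    AutoScalingGroupName='ml-inference-asg',  # placeholder group name
    PolicyName='cpu-target-tracking',
    PolicyType='TargetTrackingScaling',
    TargetTrackingConfiguration={
        'PredefinedMetricSpecification': {
            'PredefinedMetricType': 'ASGAverageCPUUtilization'
        },
        'TargetValue': 60.0
    }
)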
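Region choice, the last strategy in the list, matters because the same instance type is priced differently across regions; batch and training jobs that are not latency-sensitive can simply run where compute is cheapest. A rough sketch using the AWS Pricing API: the instance type and region list are assumptions, and the filters are meant to narrow the result to a single Linux on-demand SKU.

import json
import boto3

# The Pricing API is served from a small set of regions, e.g. us-east-1
pricing = boto3.client('pricing', region_name='us-east-1')

def on_demand_price(instance_type, location):
    """On-demand Linux price (USD per hour) for an instance type in a region."""
    response = pricing.get_products(
        ServiceCode='AmazonEC2',
        Filters=[
            {'Type': 'TERM_MATCH', 'Field': 'instanceType', 'Value': instance_type},
            {'Type': 'TERM_MATCH', 'Field': 'location', 'Value': location},
            {'Type': 'TERM_MATCH', 'Field': 'operatingSystem', 'Value': 'Linux'},
            {'Type': 'TERM_MATCH', 'Field': 'tenancy', 'Value': 'Shared'},
            {'Type': 'TERM_MATCH', 'Field': 'preInstalledSw', 'Value': 'NA'},
            {'Type': 'TERM_MATCH', 'Field': 'capacitystatus', 'Value': 'Used'}
        ],
        MaxResults=1
    )
    product = json.loads(response['PriceList'][0])
    term = next(iter(product['terms']['OnDemand'].values()))
    dimension = next(iter(term['priceDimensions'].values()))
    return float(dimension['pricePerUnit']['USD'])

for location in ['EU (Stockholm)', 'EU (Ireland)', 'EU (Frankfurt)']:
    print(location, on_demand_price('m5.xlarge', location), 'USD/hour')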