How can I use an L2 Secret created with Secrets Manager to resolve as an L1 Cfn Property value?
from aws_cdk import (
core,
aws_secretsmanager as secretsmanager,
aws_elasticache as elasticache
)
class MyStack(core.Stack):
    """Stack from the question: creates a Secrets Manager secret and tries
    to feed it to an ElastiCache replication group.

    NOTE(review): passing ``redis_password.secret_value`` (a SecretValue
    token object) straight into ``auth_token`` is what triggers the
    JSIIError quoted just after this snippet.
    """

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # L2 secret whose value is generated at deploy time; the listed
        # characters are excluded from the generated string.
        redis_password = secretsmanager.Secret(
            self, "RedisPassword",
            description="Redis auth",
            generate_secret_string=secretsmanager.SecretStringGenerator(
                exclude_characters='/"#'
            )
        )
        self.redis = elasticache.CfnReplicationGroup(self, 'RedisCluster',
            # BUG (the point of the question): secret_value is a SecretValue,
            # not a plain string, so the L1 construct rejects it.
            auth_token=redis_password.secret_value,
            # other properties
        )
This gives the error
jsii.errors.JSIIError: Object of type #aws-cdk/aws-secretsmanager.Secret is not convertible to #aws-cdk/core.CfnElement
In Cloudformation to resolve a secret I'd use something like
AuthToken: !Sub '{{resolve:secretsmanager:${MySecret}::password}}'
But an L2 Secret doesn't expose the CloudFormation Ref the way L1 constructs do (as far as I know)
What am I missing?
I was only missing the to_string() method
from aws_cdk import (
core,
aws_secretsmanager as secretsmanager,
aws_elasticache as elasticache
)
class MyStack(core.Stack):
    """Working version from the answer.

    Calling ``.to_string()`` on the SecretValue renders it as a
    ``{{resolve:secretsmanager:...}}`` dynamic reference in the synthesized
    template (see the CloudFormation output below this snippet).
    """

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        redis_password = secretsmanager.Secret(
            self, "RedisPassword",
            description="Redis auth",
            generate_secret_string=secretsmanager.SecretStringGenerator(
                exclude_characters='/"#'
            )
        )
        self.redis = elasticache.CfnReplicationGroup(self, 'RedisCluster',
            # FIX: to_string() yields a string token that resolves to the
            # secret's value at deploy time.
            auth_token=redis_password.secret_value.to_string(),
            # other properties
        )
This synthesizes to
{
"RedisPasswordED621C10": {
"Type": "AWS::SecretsManager::Secret",
"Properties": {
"Description": "Redis auth",
"GenerateSecretString": {
"ExcludeCharacters": "/\"#"
}
},
"Metadata": {
"aws:cdk:path": "my-cdk-stack/RedisPassword/Resource"
}
},
"RedisCluster": {
"Type": "AWS::ElastiCache::ReplicationGroup",
"Properties": {
"ReplicationGroupDescription": "RedisGroup",
"AtRestEncryptionEnabled": true,
"AuthToken": {
"Fn::Join": [
"",
[
"{{resolve:secretsmanager:",
{
"Ref": "RedisPasswordED621C10"
},
":SecretString:::}}"
]
]
},
"OtherProps": "..."
}
}
}
Related
I am writing unit test cases for my fastapi project and unable to mock a dynamodb call.
File_1
This file has all the methods to perform DynamoDB actions using boto3 calls.
class DynamoDBRepository:
Insert Item - Inserts value
Get Item - Returns value
#File_2
Has a "AppConfig" class which will be used as a dependency in a later file
from file_1 import DynamoDBRepository
class AppConfig:
    """Dependency object giving FastAPI routes access to the DynamoDB repository."""

    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        # Real repository from file_1; routes reach it via
        # objects._dynamodb_repository.
        self._dynamodb_repository = DynamoDBRepository(table=self._job_table, region=self._region)
File_3:
This file has the fast_api route decorator
from file_2 import AppConfig
# NOTE(review): '#router.get(' was almost certainly '@router.get(' before the
# post lost its '@' signs; as written, the next two lines are stray tokens.
#router.get(
"/object/{object_id}"
)
def get_request(
    object_id: str,
    objects: AppConfig = Depends(AppConfig),
) -> ObjectBody:
    """Fetch one object by id via the injected AppConfig's repository.

    NOTE(review): the snippet is truncated in the post — the ``try`` has no
    except/finally clause.
    """
    try:
        object_detail = objects._dynamodb_repository.get_item({"object_id": object_id})
        return object_detail["Item"]
I am trying to mock the get_item method in my test file:
File_4
This is my test file in which
client = TestClient(fast_api_app)
class MockAppConfig:
    """Test double for AppConfig (question's attempt).

    NOTE(review): this still constructs the *real* DynamoDBRepository, which
    is why the test goes on to hit actual AWS and fails the credential check.
    """

    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb and lambda"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        self._dynamodb_repository = DynamoDBRepository(table=self._job_table, region=self._region)
def test_get_request():
    """Question's failing test.

    NOTE(review): two problems are visible in this snippet:
    - ``MockAppConfig()`` builds a throwaway instance; presumably FastAPI
      constructs its own MockAppConfig when resolving the dependency, so this
      patch never reaches the route — verify against the framework docs.
    - ``get_item`` is replaced with a dict, not a callable returning one.
    """
    fast_api_app.dependency_overrides[AppConfig] = MockAppConfig
    MockAppConfig()._dynamodb_repository.get_item = {
        "id": "1234",
        "title": "Foo",
        "description": "Hello",
    }
    # NOTE(review): the route above is declared as "/object/{object_id}" but
    # the test calls "/objects/1234" — confirm which path is correct.
    response = client.get("/objects/1234")
    assert response.status_code == 200
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
The mocking of get_item is not working and it is still querying the original db and failing due to credential check.
I tried monkeypatch & fastapi_dep fixtures, also patching but somehow the get_item method mocking isn't working
Will mocking get_item method work?
class MockDynamoDBRepository:
    """Stand-in for DynamoDBRepository that serves one canned item.

    Fix over the original: the class had no ``__init__``, yet it is
    instantiated as ``MockDynamoDBRepository(table=..., region=...)`` a few
    lines later — the implicit object ``__init__`` rejects those keyword
    arguments with a TypeError. An arg-swallowing constructor makes the mock
    signature-compatible with the real repository. ``get_item`` also gains an
    explicit ``self`` parameter (it previously relied on ``*args`` absorbing it).
    """

    def __init__(self, *args, **kwargs):
        # Accept and ignore whatever the real constructor takes.
        pass

    def get_item(self, *args, **kwargs):
        """Return a fixed Item payload regardless of the key queried."""
        return {
            "Item": {
                "id": "foo",
                "title": "Foo",
                "description": "Hi",
            }
        }
class MockAppConfig:
    """AppConfig double wired to the mock repository (answer's version)."""

    def __init__(self) -> None:
        """Constructor class to instantiate dynamodb and lambda"""
        self._job_table = "Dynamo_DB_Table"
        self._region = "Table_region"
        # Uses the mock repository above, so no AWS call is made; note the
        # mock's constructor must accept these keyword arguments.
        self._dynamodb_repository = MockDynamoDBRepository(table=self._job_table, region=self._region)
def test_get_request():
    """Answer's version: override the AppConfig dependency with the mock class."""
    fast_api_app.dependency_overrides[AppConfig] = MockAppConfig
    response = client.get("/objects/1234")
    assert response.status_code == 200
    # Expect the canned item served by MockDynamoDBRepository.get_item.
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
Building on #svfat's answer, here is how you can do the test with fastapi_dep - pick any one of the test approaches - with clause or indirect parameter:
class MockDynamoDBRepository:
    """Fake repository: swallows constructor arguments and always serves the
    same single item, so tests never touch a real DynamoDB table."""

    def __init__(self, *args, **kwargs):
        # Signature-compatible with the real repository; everything ignored.
        pass

    def get_item(self, *args, **kwargs):
        """Return the canned payload for any key."""
        canned = {"id": "foo", "title": "Foo", "description": "Hi"}
        return {"Item": canned}
class MockAppConfig:
    """Drop-in replacement for AppConfig that is wired entirely to mocks."""

    def __init__(self) -> None:
        """Instantiate the mocked DynamoDB repository."""
        self._job_table = "Mock Dynamo_DB_Table"
        self._region = "Mock Table_region"
        self._dynamodb_repository = MockDynamoDBRepository(
            table=self._job_table,
            region=self._region,
        )
def test_get_request_deps(fastapi_dep):
    """Same override as before, expressed via the fastapi_dep fixture's
    context-manager form so it is undone automatically on exit."""
    with fastapi_dep(fast_api_app).override(
        {
            AppConfig: MockAppConfig,
        }
    ):
        response = client.get("/objects/1234")
        assert response.status_code == 200
        assert response.json() == {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
# NOTE(review): '#pytest.mark.parametrize(' is a mangled
# '@pytest.mark.parametrize(' decorator (the post lost its '@' signs and it
# also needs `import pytest`); as written, the argument lines below are
# stray top-level tokens.
#pytest.mark.parametrize(
"fastapi_dep",
[
    (
        fast_api_app,
        {AppConfig: MockAppConfig},
    )
],
indirect=True,
)
def test_get_request_deps_indirect(fastapi_dep):
    """Override supplied to the fixture through parametrize(indirect=True)."""
    response = client.get("/objects/1234")
    assert response.status_code == 200
    assert response.json() == {
        "id": "foo",
        "title": "Foo",
        "description": "Hi",
    }
If you don't want to create all the extra classes, you can use the pure mock approach like so:
# Fix: import from the standard library's unittest.mock instead of the
# deprecated third-party `mock` backport (unnecessary on Python 3.3+).
from unittest.mock import MagicMock


def test_get_request_deps_mock(fastapi_dep):
    """Override AppConfig with a bare MagicMock — no helper classes needed.

    Attribute access on a MagicMock auto-creates child mocks, so the route's
    ``objects._dynamodb_repository.get_item(...)`` chain resolves without any
    real repository being built.
    """
    my_mock = MagicMock()
    my_mock._dynamodb_repository.get_item.return_value = {
        "Item": {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
    }
    with fastapi_dep(file_3.app).override(
        {
            # The override must be a zero-arg callable returning the stub.
            AppConfig: lambda: my_mock,
        }
    ):
        response = client.get("/objects/1234")
        assert response.status_code == 200
        assert response.json() == {
            "id": "foo",
            "title": "Foo",
            "description": "Hi",
        }
I have 4 tables: Hardware, SoftwareName, SoftwareVersion, and Software.
The Software table has a one-to-many relationship with the SoftwareName table and the SoftwareVersion table. Finally, the Hardware model has a one-to-many relationship with the Software table.
I'm trying to get just a specific column from a model relationship using Pydantic Schema.
Now I'm getting this output:
[
{
"id": 1,
"hostname": "hostname2",
"softwares": [
{
"id": 1,
"software_name": {
"id": 1,
"name": "nginx"
},
"software_version": {
"id": 1,
"version": "2.9"
}
},
{
"id": 2,
"software_name": {
"id": 2,
"name": "vim"
},
"software_version": {
"id": 2,
"version": "0.3"
}
},
{
"id": 3,
"software_name": {
"id": 3,
"name": "apache"
},
"software_version": {
"id": 3,
"version": "1.0"
}
}
]
}
]
But what I expect is this output:
[
{
"id": 1,
"hostname": "hostname2",
"softwares": [
{
"id": 1,
"name": "nginx",
"version": "2.9"
},
{
"id": 2,
"name": "vim",
"version": "0.3"
},
{
"id": 3,
"name": "apache",
"version": "1.0"
}
]
}
]
I have the file main.py:
import uvicorn
from typing import Any, Iterator, List, Optional
from faker import Faker
from fastapi import Depends, FastAPI
from pydantic import BaseModel
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, sessionmaker, relationship
from faker.providers import DynamicProvider
# Faker DynamicProviders: each registers a fake.<provider_name>() method that
# returns a random element from its list.
software_name = DynamicProvider(
    provider_name="software_name",
    elements=["bash", "vim", "vscode", "nginx", "apache"],
)
software_version = DynamicProvider(
    provider_name="software_version",
    elements=["1.0", "2.9", "1.1", "0.3", "2.0"],
)
hardware = DynamicProvider(
    provider_name="hardware",
    elements=["hostname1", "hostname2", "hostname3", "hostname4", "hostname5"],
)
fake = Faker()
# then add new provider to faker instance
fake.add_provider(software_name)
fake.add_provider(software_version)
fake.add_provider(hardware)

# SQLite file database; check_same_thread=False allows the connection to be
# used from threads other than the one that created it.
engine = create_engine("sqlite:///.db", connect_args={"check_same_thread": False})
# NOTE(review): sessionmaker(autocommit=True) and declarative_base(bind=...)
# are legacy pre-2.0 SQLAlchemy APIs — confirm the installed version.
SessionLocal = sessionmaker(autocommit=True, autoflush=True, bind=engine)
Base = declarative_base(bind=engine)
class Software(Base):
    """Association row linking one Hardware to a SoftwareName and a SoftwareVersion."""
    __tablename__ = 'software'
    id = Column(Integer, primary_key=True)
    hardware_id = Column(Integer, ForeignKey('hardware.id'))
    name_id = Column(Integer, ForeignKey('software_name.id'))
    version_id = Column(Integer, ForeignKey('software_version.id'))
    # NOTE(review): the backref names repeat the table names, which puts a
    # reverse 'software_name'/'software_version' attribute on the *target*
    # classes — confusing naming; confirm it is intended.
    software_name = relationship('SoftwareName', backref='software_name')
    software_version = relationship('SoftwareVersion',
                                    backref='software_version')
class SoftwareName(Base):
    """Lookup table of software product names."""
    __tablename__ = 'software_name'
    id = Column(Integer, primary_key=True)
    name = Column(String)
class SoftwareVersion(Base):
    """Lookup table of software version strings."""
    __tablename__ = 'software_version'
    id = Column(Integer, primary_key=True)
    version = Column(String)
class Hardware(Base):
    """A host; owns a one-to-many collection of Software rows."""
    __tablename__ = "hardware"
    id = Column(Integer, primary_key=True, autoincrement=True)
    hostname = Column(String, nullable=False)
    softwares = relationship(Software)
# Reset the schema at import time: drop every table, then recreate them all.
Base.metadata.drop_all()
Base.metadata.create_all()
class BaseSchema(BaseModel):
    """Shared base: every schema exposes an id and can read ORM objects."""
    id: int

    class Config:
        # Enables .from_orm() / population from object attributes.
        orm_mode = True
class SoftwareNameSchema(BaseSchema):
    """id + name of a software product."""
    name: str
class SoftwareVersionSchema(BaseSchema):
    """id + version string of a software release."""
    version: str
class SoftwareSchema(BaseSchema):
    """Nested serialization of the two relationships — this produces the
    verbose output the question wants to flatten."""
    software_name: SoftwareNameSchema
    software_version: SoftwareVersionSchema
class HardwareOut(BaseSchema):
    """Response model for the /hardwares endpoint."""
    hostname: str
    softwares: List[SoftwareSchema]
app = FastAPI()

# NOTE(review): '#app.on_event("startup")' is a mangled
# '@app.on_event("startup")' decorator (the post lost its '@' signs).
#app.on_event("startup")
def on_startup() -> None:
    """Seed the database: 10 Hardware rows, each with 3 Software rows."""
    session = SessionLocal()
    for _ in range(10):
        software_list = []
        for _ in range(3):
            sn = SoftwareName(name=fake.software_name())
            sv = SoftwareVersion(version=fake.software_version())
            s = Software(software_name=sn, software_version=sv)
            software_list.append(s)
        h = Hardware(hostname=fake.hardware(), softwares=software_list)
        session.add(h)
        session.flush()
    session.close()
def get_db() -> Iterator[Session]:
    """FastAPI dependency: yield a database session, always closing it after
    the request finishes, even if the handler raised."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
# NOTE(review): '#app.get(...)' is a mangled '@app.get(...)' decorator.
#app.get("/hardwares", response_model=List[HardwareOut])
def get_hardwares(db: Session = Depends(get_db)) -> Any:
    """Return every Hardware row serialized through HardwareOut."""
    return [HardwareOut.from_orm(hardware) for hardware in db.query(Hardware).all()]
How can I change the HardwareOut Schema to return what I expect?
I finally got the answer I wanted.
I added 2 changes to get it:
Use the Union type from typing lib for the attributes software_name e software_version like that:
Add a Pydantic validator for each field to change the returned value, like that:
from typing import Union
from pydantic import validator
...
class SoftwareSchema(BaseSchema):
    """Flattens the nested relationship objects down to plain strings.

    Fix: the original post's '@validator' decorators had been mangled into
    '#validator' comments, so the validators never attached and the plain
    methods below would have been dead code; the '@' signs are restored.
    """

    # Union lets the field accept either the ORM relationship object or the
    # already-flattened string the validator produces.
    software_name: Union[str, SoftwareNameSchema]
    software_version: Union[str, SoftwareVersionSchema]

    @validator('software_name')
    def name_to_str(cls, v, values, **kwargs):
        # Collapse a SoftwareName object to its .name string; pass strings through.
        return v.name if not isinstance(v, str) else v

    @validator('software_version')
    def version_to_str(cls, v, values, **kwargs):
        # Collapse a SoftwareVersion object to its .version string.
        return v.version if not isinstance(v, str) else v
...
And the answer was this:
[
{
"id": 1,
"hostname": "hostname2",
"softwares": [
{
"id": 1,
"software_name": "nginx",
"software_version": "2.9"
},
{
"id": 2,
"software_name": "vim",
"software_version": "0.3"
},
{
"id": 3,
"software_name": "apache",
"software_version": "1.0"
}
]
}
]
update:
As an improvement, I add an alias for each attribute for a better semantic response. So, I change software_name to name and software_version to version. Like this:
from typing import Union
from pydantic import validator
...
class SoftwareSchema(BaseSchema):
    """Updated version: aliases rename the output keys to name/version.

    Fix: '@validator' decorators restored (they were mangled into
    '#validator' comments, leaving the methods unattached).
    NOTE(review): ``Field`` must also be imported from pydantic (the shown
    import block only brings in ``validator``), and with an alias set,
    pydantic populates the field from the alias — verify ``from_orm`` still
    finds the ORM attribute in your pydantic version.
    """

    software_name: Union[str, SoftwareNameSchema] = Field(None, alias="name")
    software_version: Union[str, SoftwareVersionSchema] = Field(None, alias="version")

    @validator('software_name')
    def name_to_str(cls, v, values, **kwargs):
        # ORM object -> its .name string; strings pass through unchanged.
        return v.name if not isinstance(v, str) else v

    @validator('software_version')
    def version_to_str(cls, v, values, **kwargs):
        return v.version if not isinstance(v, str) else v
...
Hi I am using the django-graphql-social-auth library and whenever I create a social user using this mutation:
import graphene
import graphql_social_auth
class Mutations(graphene.ObjectType):
    # Stock SocialAuthJWT field; its payload type defines `social` and
    # `token` only, which is why querying `refreshToken` fails with
    # "Cannot query field \"refreshToken\"".
    social_auth = graphql_social_auth.SocialAuthJWT.Field()
and this Graphql mutation:
mutation SocialAuth($provider: String!, $accessToken: String!) {
socialAuth(provider: $provider, accessToken: $accessToken) {
social {
uid
}
token
refreshToken
}
}
I get this error:
{
"errors": [
{
"message": "Cannot query field \"refreshToken\" on type \"SocialAuthJWT\".",
"locations": [
{
"line": 29,
"column": 5
}
]
}
]
}
Thanks in advance for your help.
I solved it as follows:
# Social authentication
class SocialAuth(graphql_social_auth.SocialAuthMutation):
    """SocialAuthJWT variant that also returns a JWT refresh token.

    Fix: the '@' of '@classmethod' had been mangled to '#' in the post,
    turning it into a comment; without the decorator, ``resolve`` would be
    called as a plain method with the wrong first argument. '@' restored
    (``classmethod`` is a builtin, so no import is needed).
    """

    token = graphene.String()
    refresh_token = graphene.String()

    @classmethod
    def resolve(cls, root, info, social, **kwargs):
        """Return a payload carrying both the access and refresh tokens."""
        if social.user.refresh_tokens.count() >= 1:
            # Reuse the newest existing refresh token.
            return cls(token=get_token(social.user),
                       refresh_token=social.user.refresh_tokens.last())
        else:
            # First login for this user: mint a fresh refresh token.
            return cls(token=get_token(social.user),
                       refresh_token=create_refresh_token(social.user))
I want to create a generic endpoint definition in FastAPI (Python) that reads a URL path parameter and then calls a specific method to do the deserialization.
But I always get
422 Unprocessable Entity
So I expect that it works like so:
/answer/aaa -> handle_generic_answer -> read_item_aaa, type body to ModelAAA
/answer/bbb -> handle_generic_answer -> read_item_bbb, type body to ModelBBB
etc.
Here's the generic endpoint code:
# NOTE(review): '#app.post(...)' is a mangled '@app.post(...)' decorator.
#app.post("/answer/{type}")
def handle_generic_answer(type: str, item):
    """Generic dispatcher: picks a per-type reader for the request body.

    NOTE(review): `item` has no Pydantic model annotation, so FastAPI treats
    it as a required *query* parameter — which is exactly the 422
    "field required"/"query, item" error shown below.
    """
    # I also tried
    # def handle_generic_answer(type: str, item: Any):
    # or
    # def handle_generic_answer(type: str, item: Optional):
    switcher = {
        # NOTE(review): key is 'aaaa' (four a's) but the calls use
        # /answer/aaa, so 'aaa' falls through to unrecognised_answer.
        'aaaa': read_item_aaa,
        'bbb': read_item_bbb,
        'nothing': unrecognised_answer
    }
    func = switcher.get(type, unrecognised_answer)
    print('answer >> ' + type)
    # NOTE(review): the reader's return value is discarded — the endpoint
    # would respond with null even on success.
    func(item)
then I have separate methods called based on a type value:
def read_item_aaa(item: ModelAAA):
    """Handle an 'aaa' answer: delegate to update_aaa, then report success."""
    update_aaa(item)
    return {"type": "aaa", "result": "success"}
def read_item_bbb(item: ModelBBB):
    """Handle a 'bbb' answer: delegate to update_bbb, then report success."""
    update_bbb(item)
    return {"type": "bbb", "result": "success"}
and a default -
def unrecognised_answer(type):
    """Fallback handler: reject unknown answer types with HTTP 400.

    Fix: the original ended with ``return {}`` *after* the raise — that
    statement was unreachable and has been removed; callers never saw it.
    """
    print("unrecognised_answer")
    raise HTTPException(status_code=400, detail="answer type not found")
models are defined like this:
from pydantic import BaseModel, Field
class ModelAAA(BaseModel):
    """Request-body schema for the 'aaa' answer type."""
    field1: str
    # Empty list default; pydantic copies field defaults per instance, so
    # this is not the shared-mutable-default pitfall of plain Python.
    field2: list = []
But whether I call
http://localhost:8000/answer/aaa
or http://localhost:8000/answer/some-other-url
I always get 422:
{
"detail": [
{
"loc": [
"query",
"item"
],
"msg": "field required",
"type": "value_error.missing"
}
]
}
You forgot to annotate body parameter item.
Without this, item is treated as a query parameter of type str. For example:
# NOTE(review): '#app.post' should be '@app.post' (mangled '@'); the function
# body is omitted in the original answer — the point is only the Union
# annotation on `item`, which makes FastAPI read it from the request body.
#app.post("/answer/{type}")
def handle_generic_answer(type: str, item: Union[ModelAAA, ModelBBB]):
I wrote the code below using aws-cdk (Python), but it is failing to deploy with a "redis_container not available" error. What am I doing wrong? I want the redis container to start first and then the rest. Maybe my understanding of container dependencies is not correct?
ecs_redis_task = ecs.FargateTaskDefinition(self,
id = 'redis',
cpu=512,
memory_limit_mib =1024
)
redis_container = ecs_redis_task.add_container(id = 'redis_container',
image = img_.from_ecr_repository(repository=repo_, tag='redis_5.0.5')
)
redis_container.add_port_mappings({
'containerPort' : 6379
})
redis_dependency = ecs.ContainerDependency(container = redis_container, condition = ecs.ContainerDependencyCondition.HEALTHY)
ecs_webserver_task = ecs.FargateTaskDefinition(self,
id = 'webserver',
cpu=256,
memory_limit_mib =512
)
webserver_container = ecs_webserver_task.add_container(id = 'webserver_container',
image = img_.from_ecr_repository(repository=repo_, tag='airflow_1.10.9')
)
webserver_container.add_port_mappings({
'containerPort' : 8080
})
webserver_container.add_container_dependencies(redis_dependency)
If I remove the dependency code, it deploys fine!
Error:
12/24 | 2:46:51 PM | CREATE_FAILED | AWS::ECS::TaskDefinition | webserver (webserverEE139216) Cannot depend on container + 'redis_container' because it does not exist (Service: AmazonECS; Status Code: 400; Error Code: ClientException; Request ID: 81828979-9e65-474e-ab0e-b163168d5613)
I just tried this code and it works as expected, adding the dependency in the task definition; the only thing changed from your code is the image:
from aws_cdk import (
#aws_s3 as s3,
aws_ecs as ecs,
core
)
class HelloCdkStack(core.Stack):
    """Answer's reproduction: two Fargate task definitions, with the
    webserver container declaring a dependency on the redis container."""

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        #bucket = s3.Bucket(self, "MyFirstBucket", versioned=True,)
        ecs_redis_task = ecs.FargateTaskDefinition(self, id='redis', cpu=512, memory_limit_mib=1024)
        redis_container = ecs_redis_task.add_container(id = 'redis_container', image=ecs.ContainerImage.from_registry("amazon/amazon-ecs-sample"),)
        redis_container.add_port_mappings({
            'containerPort' : 6379
        })
        # NOTE(review): the HEALTHY condition requires the dependency
        # container to define a health check — none is configured here;
        # confirm before relying on it.
        redis_dependency = ecs.ContainerDependency(container = redis_container, condition = ecs.ContainerDependencyCondition.HEALTHY)
        ecs_webserver_task = ecs.FargateTaskDefinition(self, id='webserver', cpu=256, memory_limit_mib=512)
        webserver_container = ecs_webserver_task.add_container(id = 'webserver_container', image=ecs.ContainerImage.from_registry("amazon/amazon-ecs-sample"),)
        webserver_container.add_port_mappings({
            'containerPort' : 8080
        })
        # NOTE(review): redis_container belongs to a *different* task
        # definition (ecs_redis_task); the question's CREATE_FAILED error
        # says ECS rejects a DependsOn naming a container that is not in the
        # same task definition — verify this deploys as claimed.
        webserver_container.add_container_dependencies(redis_dependency)
CloudFormation after cdk synth:
"webserverEE139216": {
"Type": "AWS::ECS::TaskDefinition",
"Properties": {
"ContainerDefinitions": [
{
"DependsOn": [
{
"Condition": "HEALTHY",
"ContainerName": "redis_container"
}
],
"Essential": true,
"Image": "amazon/amazon-ecs-sample",
"Name": "webserver_container",
"PortMappings": [
{
"ContainerPort": 8080,
"Protocol": "tcp"
}
]
}
],