Brownie: ValueError: execution reverted: VM Exception while processing transaction: revert

Macbook Pro : Monterey
Intel Core i7
Brownie v1.17.2
I am learning Solidity by following this reference: https://www.youtube.com/watch?v=M576WGiDBdQ&t=25510s.
What I tried to do here is use Brownie to deploy a contract (FundMe) in a script (deploy.py), then write a test script (scripts/fund_and_withdraw.py).
I hit the same error: the MockV3Aggregator deployed successfully, but getEntranceFee is 0.
I googled it and found this answer, which I don't quite follow: https://ethereum.stackexchange.com/questions/114889/deploying-ganache-local-w-brownie-vm-exception-while-processing-transaction-in
"getPrice() isn't returning a number you want from the mock, somewhere in the vicinity of 2B. Think this is a bug with the Ganache implementation: performing the calculation (minimumUSD * precision) / price in getEntranceFee() gives you a number less than 1, and, since Solidity can't handle floats, Solidity simply sees it as a 0, and the whole thing errors out."
scripts/fund_and_withdraw.py
from brownie import FundMe
from scripts.helpful_scripts import get_account


def fund():
    fund_me = FundMe[-1]
    account = get_account()
    entrance_fee = fund_me.getEntranceFee()
    print(f"The entrance fee is : {entrance_fee} !")
    print("funding")
    fund_me.fund({"from": account, "value": entrance_fee})
    print(f"Funded {entrance_fee} !")


def withdraw():
    fund_me = FundMe[-1]
    account = get_account()
    fund_me.withdraw({"from": account})


def main():
    fund()
    withdraw()
deploy.py
from brownie import FundMe, network, config, MockV3Aggregator
from scripts.helpful_scripts import (
    get_account,
    deploy_mocks,
    LOCAL_BLOCKCHAIN_ENVIRONMENT,
)


def deploy_fund_me():
    account = get_account()
    # if we have a persistent network like rinkeby, use the associated address
    # otherwise, deploy mocks
    if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENT:
        price_feed_address = config["networks"][network.show_active()][
            "eth_usd_price_feed"
        ]
    else:
        deploy_mocks()
        # just use the latest MockV3Aggregator
        price_feed_address = MockV3Aggregator[-1].address
    print("***********************************************************")
    print(f"MockV3Aggregator's address is {price_feed_address}")
    fund_me = FundMe.deploy(
        price_feed_address,
        {"from": account},
        publish_source=config["networks"][network.show_active()].get("verify"),
    )
    print("***********************************************************")
    print(f"The Ether price is : {fund_me.getPrice()}\n")
    print(f"Contract deployed to {fund_me.address}\n")
    entrance_fee = fund_me.getEntranceFee()
    print("***********************************************************")
    print(f"The entrance fee is : {entrance_fee} !\n")
    return fund_me


def main():
    deploy_fund_me()
FundMe.sol
// SPDX-License-Identifier: MIT
pragma solidity 0.8.0;

// we need to tell Brownie (and the compiler) what @chainlink maps to, via the remapping in the config
import "@chainlink/contracts/src/v0.6/interfaces/AggregatorV3Interface.sol";
import "@chainlink/contracts/src/v0.6/vendor/SafeMathChainlink.sol";

contract FundMe {
    //using SafeMathChainlink for uint256;

    mapping(address => uint256) public addressToAmountFunded;
    address[] public funders;
    address public owner;
    AggregatorV3Interface public priceFeed;

    // if you're following along with the freecodecamp video
    // Please see https://github.com/PatrickAlphaC/fund_me
    // to get the starting solidity contract code, it'll be slightly different than this!
    constructor(address _priceFeed) {
        // make price feed a parameter
        priceFeed = AggregatorV3Interface(_priceFeed);
        owner = msg.sender;
    }

    function fund() public payable {
        uint256 mimimumUSD = 50 * 10**18;
        require(
            getConversionRate(msg.value) >= mimimumUSD,
            "You need to spend more ETH!"
        );
        addressToAmountFunded[msg.sender] += msg.value;
        funders.push(msg.sender);
    }

    function getVersion() public view returns (uint256) {
        return priceFeed.version();
    }

    function getPrice() public view returns (uint256) {
        (, int256 answer, , , ) = priceFeed.latestRoundData();
        return uint256(answer * 10000000000);
    }

    // 1000000000
    function getConversionRate(uint256 ethAmount)
        public
        view
        returns (uint256)
    {
        uint256 ethPrice = getPrice();
        uint256 ethAmountInUsd = (ethPrice * ethAmount) / 1000000000000000000;
        return ethAmountInUsd;
    }

    function getEntranceFee() public view returns (uint256) {
        // mimimumUSD
        uint256 mimimumUSD = 50 * 10**18;
        uint256 price = getPrice();
        uint256 precision = 1 * 10**18;
        return (mimimumUSD * precision) / price;
    }

    modifier onlyOwner() {
        require(msg.sender == owner);
        _;
    }

    function withdraw() public payable onlyOwner {
        payable(msg.sender).transfer(address(this).balance);
        for (
            uint256 funderIndex = 0;
            funderIndex < funders.length;
            funderIndex++
        ) {
            address funder = funders[funderIndex];
            addressToAmountFunded[funder] = 0;
        }
        funders = new address[](0);
    }
}
Update
The error disappeared magically (it may come back later); right now the error is: IndexError: list index out of range.
I actually faced the same error in another project (Brownie test IndexError: list index out of range).
According to that answer and the Brownie docs, I need to add an account.
What confuses me is: if I already launched the local Ganache blockchain, why do I still need to add an account, or did I add it the wrong way?
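For reference, the get_account() pattern from the course looks roughly like the sketch below (LOCAL_BLOCKCHAIN_ENVIRONMENTS is an assumed list of local network names; adjust it to your own helpful_scripts). On a local Ganache chain Brownie pre-funds its accounts container, so accounts[0] exists; on any other network the container starts empty, which is where the IndexError comes from unless a key is added first.

from brownie import accounts, config, network

# Assumed list of local network names; adjust to your own setup.
LOCAL_BLOCKCHAIN_ENVIRONMENTS = ["development", "ganache-local"]


def get_account():
    # Local/forked chains: Brownie pre-funds accounts, so accounts[0] exists.
    if network.show_active() in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
        return accounts[0]
    # Live networks: the accounts container starts empty, so a key has to be
    # added (otherwise accounts[0] raises "IndexError: list index out of range").
    return accounts.add(config["wallets"]["from_key"])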

function getEntranceFee() public view returns (uint256) {
    // mimimumUSD
    uint256 mimimumUSD = 50 * 10**18;
    uint256 price = getPrice();
    uint256 precision = 1 * 10**18;
    return (mimimumUSD * precision) / price;
}
You do not need to multiply by precision. Currently, assuming an ETH price of 3000, you are returning
(50 * 10^18 * 10^18) / (3000 * 10^10)
= (50 * 10^36) / (3 * 10^13)
= (50 / 3) * 10^23
I think your return value should be
return mimimumUSD / price;

Hi, I'm following the same course and I fell into the same problem as you. I forgot to replace the hard-coded address with the variable price_feed_address here:
deploy.py
fund_me = FundMe.deploy(
    price_feed_address,  # <--- before there was "0x8A..etc"
    {"from": account},
    publish_source=config["networks"][network.show_active()].get("verify")
)
Now everything works.
The only difference I found between our code is here:
deploy.py
from brownie import FundMe, MockV3Aggregator, network, config
from scripts.helpful_script import (
    get_account,
    deploy_mocks,
    LOCAL_BLOCKCHAIN_ENVIRONMENTS,
)


def deploy_fund_me():
    account = get_account()
    # pass the price feed address to our fundme contract
    # if we are on a persistent network like rinkeby, use the associated address
    # otherwise, deploy mocks
    if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
        price_feed_address = config["networks"][network.show_active()][
            "eth_usd_price_feed"
        ]
    else:
        deploy_mocks()
        price_feed_address = MockV3Aggregator[-1].address
    fund_me = FundMe.deploy(
        price_feed_address,
        {"from": account},
        publish_source=config["networks"][network.show_active()].get("verify"),
    )
    print(f"Contract deployed to {fund_me.address}")
    return fund_me


def main():
    deploy_fund_me()
You call:
entrance_fee = fund_me.getEntranceFee()

If you're getting getEntranceFee as 0, remove the Web3.toWei call in this part of the helpful_scripts.py file:
MockV3Aggregator.deploy(
    DECIMALS, Web3.toWei(STARTING_PRICE, "ether"), {"from": get_account()}
)
and change it to this:
MockV3Aggregator.deploy(DECIMALS, STARTING_PRICE, {"from": get_account()})
Since you already specified at the top (as global variables) that the mock price feed's decimals would be 8, and you added the extra 8 zeros after 2000 to send the mock constructor an initial price with 8 decimal places, leaving the toWei call in adds another 18 decimal places for a total of 26, which is wrong.
In addition, your output after deploying the mock gets another 10 decimal places added by getPrice(), for a total of 36.
DECIMALS = 8
STARTING_PRICE = 200000000000
My guess is that you're getting 0 either because you exceeded the size implied by the uint8 decimals variable, or maybe because sending the initial price with 26 decimal places to the constructor takes more gas than the gas limit Ganache allows per block and transaction. Someone clarify if able!
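For concreteness, here is that arithmetic as a quick Python check (a rough sketch, assuming the mock simply reports whatever initial answer its constructor received):

# Entrance-fee arithmetic with Solidity-style integer division.
MINIMUM_USD = 50 * 10**18
PRECISION = 1 * 10**18

# Mock deployed correctly: 2000 USD with 8 decimals, +10 decimals from getPrice().
price_ok = (2000 * 10**8) * 10**10
print((MINIMUM_USD * PRECISION) // price_ok)        # 25000000000000000 (~0.025 ETH)

# Mock deployed with the extra Web3.toWei(): 8 + 18 decimals stored, +10 from getPrice().
price_inflated = (2000 * 10**8 * 10**18) * 10**10
print((MINIMUM_USD * PRECISION) // price_inflated)  # 0 -> fund() then reverts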

Please compare your FundMe.sol with the GitHub source:
https://github.com/PatrickAlphaC/brownie_fund_me/blob/main/contracts/FundMe.sol
You'll see that in function getEntranceFee() public view returns (uint256), the line
return (mimimumUSD * precision) / price;
has already been changed to:
return ((minimumUSD * precision) / price) + 1;

I also got this issue and resolved it by fixing the entrance fee; we need to keep the brackets:
function getEntranceFee() public view returns (uint256) {
    uint256 minimumUSD = 50 * (10**18);
    uint256 price = getPrice();
    uint256 precision = 1 * (10**18);
    return ((minimumUSD * precision) / price);
}


My test fails when I run "brownie test". How can I correctly compare a TransactionReceipt to a float in Python?

from scripts.helpful_scripts import get_account
from scripts.deploy import deploy_donation
from web3 import Web3


def test_can_get_conversion_rate():
    account = get_account()
    donation = deploy_donation()
    tx = donation.getConversionRate(100, {"from": account})
    tx.wait(1)
    assert tx < 0.075
    assert tx > 0.06
    print(f"The ethAmount is {tx}")


def main():
    test_can_get_conversion_rate()
I keep getting this error when I run "brownie test" in the terminal: TypeError: '<' not supported between instances of 'TransactionReceipt' and 'float'
This is my Solidity contract which I am trying to test.
The deploy Python script runs fine, but my test script does not.
// SPDX-License-Identifier: MIT
pragma solidity 0.6.6;

import "@chainlink/contracts/src/v0.6/interfaces/AggregatorV3Interface.sol";
import "@chainlink/contracts/src/v0.6/vendor/SafeMathChainlink.sol";

contract Donation {
    uint256 ethAmount;
    address payable owner;
    AggregatorV3Interface public priceFeed;

    constructor(address _priceFeed) public {
        priceFeed = AggregatorV3Interface(_priceFeed);
        owner = msg.sender;
    }

    function donate(uint256 _amount) public payable {
        ethAmount = getConversionRate(_amount);
        owner.transfer(ethAmount);
    }

    function getConversionRate(uint256 rawUSD) public returns (uint256) {
        uint256 ethUSD = (rawUSD / getPrice()) * 10**18;
        return ethUSD;
    }

    function getPrice() internal returns (uint256) {
        (, int256 answer, , , ) = priceFeed.latestRoundData();
        return uint256(answer * 100000000000);
    }
}
This is my deploy.py script; please have a look. Thanks.
from brownie import Donation, accounts, config, network, MockV3Aggregator
from scripts.helpful_scripts import (
    LOCAL_BLOCKCHAIN_ENVIRONMENTS,
    deploy_mocks,
    get_account,
)


def deploy_donation():
    account = get_account()
    if network.show_active() not in LOCAL_BLOCKCHAIN_ENVIRONMENTS:
        price_feed_address = config["networks"][network.show_active()][
            "eth_usd_price_feed_address"
        ]
    else:
        deploy_mocks()
        price_feed_address = MockV3Aggregator[-1].address
    donation = Donation.deploy(
        price_feed_address,
        {"from": account},
    )
    print(f"Contract deployed to {donation.address}")
    return donation


def main():
    deploy_donation()
So I found out my 'transactionReceipt' was actually a hashed transaction, and that was why it couldn't be compared with a float. I made both my getConversionRate() and getPrice() functions public view functions, and also made a few adjustments to my deploy.py script so that it doesn't run into more errors. This solved it.
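For illustration, a minimal sketch of the adjusted test (assuming getConversionRate() is now a public view function, so Brownie returns the integer result directly instead of a TransactionReceipt):

from scripts.deploy import deploy_donation


def test_can_get_conversion_rate():
    donation = deploy_donation()
    # A view function is a call, not a transaction: Brownie returns the
    # uint256 result as a plain integer, so there is no receipt and no .wait().
    eth_amount = donation.getConversionRate(100)
    print(f"The ethAmount is {eth_amount}")
    # It is now a number, so `<` comparisons against other numbers work;
    # pick bounds and scaling that match your contract's math.
    assert isinstance(eth_amount, int)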

Issue with the Solidity, Blockchain, and Smart Contract Course - Patrick Collins

I am having an issue with the FundMe part of this course (the 16-hour video on YT) for Brownie.
I am following Patrick and have the same set-up, yet when I try to run this script I get an error he doesn't have:
brownie run scripts/deploy.py --network rinkeby
Brownie v1.17.2 - Python development framework for Ethereum
FundmeProject is the active project.
Running 'scripts/deploy.py::main'...
File "brownie/_cli/run.py", line 50, in main
return_value, frame = run(
File "brownie/project/scripts.py", line 103, in run
return_value = f_locals[method_name](*args, **kwargs)
File "./scripts/deploy.py", line 12, in main
deploy_fund_me()
File "./scripts/deploy.py", line 7, in deploy_fund_me
fund_me = FundMe.deploy({"from:account"}, publish_source=True)
File "brownie/network/contract.py", line 523, in _call_
raise AttributeError(
AttributeError: Final argument must be a dict of transaction parameters that includes a `from` field specifying the address to deploy from
Edit: needed more background info
The contract
// SPDX-License-Identifier: MIT
// Smart contract that lets anyone deposit ETH into the contract
// Only the owner of the contract can withdraw the ETH
pragma solidity ^0.6.6;

// Get the latest ETH/USD price from chainlink price feed
import "@chainlink/contracts/src/v0.6/interfaces/AggregatorV3Interface.sol";
import "@chainlink/contracts/src/v0.6/vendor/SafeMathChainlink.sol";

contract FundMe {
    // safe math library check uint256 for integer overflows
    using SafeMathChainlink for uint256;

    // mapping to store which address deposited how much ETH
    mapping(address => uint256) public addressToAmountFunded;
    // array of addresses who deposited
    address[] public funders;
    // address of the owner (who deployed the contract)
    address public owner;

    // the first person to deploy the contract is the owner
    constructor() public {
        owner = msg.sender;
    }

    function fund() public payable {
        // 18 digit number to be compared with donated amount
        uint256 minimumUSD = 50 * 10**18;
        // is the donated amount less than 50USD?
        require(
            getConversionRate(msg.value) >= minimumUSD,
            "You need to spend more ETH!"
        );
        // if not, add to mapping and funders array
        addressToAmountFunded[msg.sender] += msg.value;
        funders.push(msg.sender);
    }

    // function to get the version of the chainlink pricefeed
    function getVersion() public view returns (uint256) {
        AggregatorV3Interface priceFeed = AggregatorV3Interface(
            0x8A753747A1Fa494EC906cE90E9f37563A8AF630e
        );
        return priceFeed.version();
    }

    function getPrice() public view returns (uint256) {
        AggregatorV3Interface priceFeed = AggregatorV3Interface(
            0x8A753747A1Fa494EC906cE90E9f37563A8AF630e
        );
        (, int256 answer, , , ) = priceFeed.latestRoundData();
        // ETH/USD rate in 18 digit
        return uint256(answer * 10000000000);
    }

    // 1000000000
    function getConversionRate(uint256 ethAmount)
        public
        view
        returns (uint256)
    {
        uint256 ethPrice = getPrice();
        uint256 ethAmountInUsd = (ethPrice * ethAmount) / 1000000000000000000;
        // the actual ETH/USD conversion rate, after adjusting the extra 0s.
        return ethAmountInUsd;
    }

    // modifier: https://medium.com/coinmonks/solidity-tutorial-all-about-modifiers-a86cf81c14cb
    modifier onlyOwner() {
        // is the message sender owner of the contract?
        require(msg.sender == owner);
        _;
    }

    // onlyOwner modifier will first check the condition inside it
    // and, if true, the withdraw function will be executed
    function withdraw() public payable onlyOwner {
        // If you are using version eight (v0.8) of chainlink aggregator interface,
        // you will need to change the code below to
        // payable(msg.sender).transfer(address(this).balance);
        msg.sender.transfer(address(this).balance);
        // iterate through all the mappings and make them 0
        // since all the deposited amount has been withdrawn
        for (
            uint256 funderIndex = 0;
            funderIndex < funders.length;
            funderIndex++
        ) {
            address funder = funders[funderIndex];
            addressToAmountFunded[funder] = 0;
        }
        // funders array will be initialized to 0
        funders = new address[](0);
    }
}
Brownie-config.yaml:
dependencies:
  # - <organization/repo>@<version>
  - smartcontractkit/chainlink-brownie-contracts@1.1.1
compiler:
  solc:
    remappings:
      - "@chainlink=smartcontractkit/chainlink-brownie-contracts@1.1.1"
dotenv: .env
wallets:
  from_key: ${PRIVATE_KEY}
deploy.py
from brownie import FundMe
from scripts.helpful_scripts import get_account


def deploy_fund_me():
    account = get_account()
    fund_me = FundMe.deploy({"from:account"}, publish_source=True)
    print(f"Contract deployed to {fund_me.address}")


def main():
    deploy_fund_me()
helpful_scripts.py
from brownie import network, config, accounts


def get_account():
    if network.show_active() == "development":
        return accounts[0]
    else:
        return accounts.add(config["wallets"]["from_key"])
.env
export PRIVATE_KEY=0x******f5a557bbb30bb35f8c9929ded41eb9a15******b066d72b44890******
export WEB3_INFURA_PROJECT_ID=97417cf50bab449c88c09debfe******
export ETHERSCAN_TOKEN=ZMFY1FUWR67X4RZYHHGR6S4NNX1G******
I am not sure that this solves your issue, but you must correct your code: {"from:account"} ---> {"from": account}
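In other words, the deploy call should look like this (a minimal corrected sketch; "from" is the dictionary key and account is the value):

fund_me = FundMe.deploy({"from": account}, publish_source=True)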
I had the same issue with the upgrades project, and adding this part to brownie-config.yaml resolved it for me:
networks:
  rinkeby:
    verify: True
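If you would rather not depend on the key being present at all, the usual dict.get pattern with a default also works (just a sketch):

# Falls back to False when "verify" isn't set for the active network,
# so the deploy simply skips source verification.
publish_source = config["networks"][network.show_active()].get("verify", False)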

Flash loan with Aave V3

I'm trying to make a flash loan using Aave V3. On the interface, there's a function called flashLoanSimple(), shown here:
function flashLoanSimple(
    address receiverAddress,
    address asset,
    uint256 amount,
    bytes calldata params,
    uint16 referralCode
) external;
The console returns an error:
ValueError: Gas estimation failed: 'The execution failed due to an exception.'. This transaction will likely revert. If you wish to broadcast, you must set the gas limit manually.
I don't know what the problem is: I deposit DAI to the contract address, then try to make the flash loan, and it fails.
Here's the full code:
contract Flasher is FlashLoanSimpleReceiverBase, Withdrawable {
    constructor(IPoolAddressesProvider _providerAddress)
        FlashLoanSimpleReceiverBase(_providerAddress)
    {}

    function flashLoanSimple(address asset, uint256 amount) external {
        require(asset != address(0), "Address zero no");
        require(amount > 0, "Pone plata");
        address receiverAddress = address(this);
        uint256 _amount = amount * 10**18;
        bytes memory params = "";
        uint16 referralCode = 0;
        POOL.flashLoanSimple(
            receiverAddress,
            asset,
            _amount,
            params,
            referralCode
        );
    }

    function executeOperation(
        address asset,
        uint256 amount,
        uint256 premium,
        address initiator,
        bytes calldata params
    ) external override returns (bool) {
        // logic.....
        // Calculate the premium
        uint256 fee = LowGasSafeMath.add(amount, premium);
        IERC20(asset).approve(address(POOL), fee);
        return true;
    }
You forgot to close the bracket '}' that you opened for the contract.
Check whether that alone is what's causing the error.
I also see that you imported 'SafeMath' and not 'LowGasSafeMath'.
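On the "you must set the gas limit manually" part of the message: if this is a Brownie script and you want to broadcast the transaction anyway to inspect the revert, you can pass the gas limit yourself (a rough sketch; dai_address, flasher and account are placeholders for your own objects):

flasher.flashLoanSimple(
    dai_address,  # placeholder: the DAI token address on the network you use
    100,          # amount; the contract scales it by 10**18 internally
    {"from": account, "gas_limit": 1_500_000, "allow_revert": True},
)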

Converting NodeJS to Python

I hope it is appropriate to ask whether someone with NodeJS experience can convert this code to Python. I have already translated some small bits, but I am getting errors when I run my code; I have tried for hours but had no luck figuring out what my issue is. For clarification, I have no NodeJS experience. Many thanks in advance :)
require('dotenv').config() // Load .env file
const axios = require('axios')
const Discord = require('discord.js')
const client = new Discord.Client()

function getPrices() {
    // API for price data.
    axios.get(`https://api.coingecko.com/api/v3/coins/markets?vs_currency=${process.env.PREFERRED_CURRENCY}&ids=${process.env.COIN_ID}`).then(res => {
        // If we got a valid response
        if (res.data && res.data[0].current_price && res.data[0].price_change_percentage_24h) {
            let currentPrice = res.data[0].current_price || 0 // Default to zero
            let priceChange = res.data[0].price_change_percentage_24h || 0 // Default to zero
            let symbol = res.data[0].symbol || '?'
            client.user.setPresence({
                game: {
                    // Example: "Watching -5,52% | BTC"
                    name: `${priceChange.toFixed(2)}% | ${symbol.toUpperCase()}`,
                    type: 3 // Use activity type 3 which is "Watching"
                }
            })
            client.guilds.find(guild => guild.id === process.env.SERVER_ID).me.setNickname(`${(currentPrice).toLocaleString().replace(/,/g, process.env.THOUSAND_SEPARATOR)}${process.env.CURRENCY_SYMBOL}`)
            console.log('Updated price to', currentPrice)
        }
        else
            console.log('Could not load player count data for', process.env.COIN_ID)
    }).catch(err => console.log('Error at api.coingecko.com data:', err))
}

// Runs when client connects to Discord.
client.on('ready', () => {
    console.log('Logged in as', client.user.tag)
    getPrices() // Ping server once on startup
    // Ping the server and set the new status message every x minutes. (Minimum of 1 minute)
    setInterval(getPrices, Math.max(1, process.env.MC_PING_FREQUENCY || 1) * 60 * 1000)
})

// Login to Discord
client.login(process.env.DISCORD_TOKEN)
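A rough, untested sketch of an equivalent bot in Python, assuming discord.py 2.x, python-dotenv and requests (these library choices are assumptions; the environment variable names are kept from the NodeJS version). It is meant as a starting point rather than a drop-in replacement:

import os

import discord
import requests
from discord.ext import tasks
from dotenv import load_dotenv

load_dotenv()  # Load .env file

intents = discord.Intents.default()
client = discord.Client(intents=intents)


def get_prices():
    # Same CoinGecko endpoint as the NodeJS version.
    url = (
        "https://api.coingecko.com/api/v3/coins/markets"
        f"?vs_currency={os.getenv('PREFERRED_CURRENCY')}&ids={os.getenv('COIN_ID')}"
    )
    data = requests.get(url, timeout=10).json()
    if not data or not data[0].get("current_price"):
        print("Could not load price data for", os.getenv("COIN_ID"))
        return None
    return data[0]


@tasks.loop(minutes=max(1, int(os.getenv("MC_PING_FREQUENCY", "1"))))
async def update_price():
    coin = get_prices()
    if coin is None:
        return
    current_price = coin.get("current_price") or 0
    price_change = coin.get("price_change_percentage_24h") or 0
    symbol = (coin.get("symbol") or "?").upper()
    # Example: "Watching -5.52% | BTC"
    await client.change_presence(
        activity=discord.Activity(
            type=discord.ActivityType.watching,
            name=f"{price_change:.2f}% | {symbol}",
        )
    )
    guild = client.get_guild(int(os.getenv("SERVER_ID", "0")))
    if guild is not None:
        separator = os.getenv("THOUSAND_SEPARATOR", ",")
        nick = f"{current_price:,}".replace(",", separator)
        await guild.me.edit(nick=f"{nick}{os.getenv('CURRENCY_SYMBOL', '')}")
    print("Updated price to", current_price)


@client.event
async def on_ready():
    print("Logged in as", client.user)
    if not update_price.is_running():
        update_price.start()  # Ping once on startup, then every x minutes


client.run(os.getenv("DISCORD_TOKEN"))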

Firestore - Recursively copy a document and all its subcollections/documents

We're using Google's Firestore for embedded machine configuration data. Because this data controls a configurable page flow and lots of other things, it's segmented into lots of subcollections, and each machine has its own top-level document in this system. However, it takes forever when we add machines to the fleet because we have to manually copy all this data across multiple documents. Does anyone know how to recursively copy a Firestore document, all its subcollections, their documents, their subcollections, etc., in Python? You'd have a document ref to the top level as well as a name for the new top-level doc.
You can use something like this to recursively read and write from a collection to another one:
import logging

from google.cloud import firestore

log = logging.getLogger(__name__)

db_client = firestore.Client()
batch_nr = 0


def read_recursive(
    source: firestore.CollectionReference,
    target: firestore.CollectionReference,
    batch: firestore.WriteBatch,
) -> None:
    global batch_nr
    for source_doc_ref in source:
        document_data = source_doc_ref.get().to_dict()
        target_doc_ref = target.document(source_doc_ref.id)
        if batch_nr == 500:
            log.info("committing %s batched operations..." % batch_nr)
            batch.commit()
            batch_nr = 0
        batch.set(
            reference=target_doc_ref,
            document_data=document_data,
            merge=False,
        )
        batch_nr += 1
        for source_coll_ref in source_doc_ref.collections():
            target_coll_ref = target_doc_ref.collection(source_coll_ref.id)
            read_recursive(
                source=source_coll_ref.list_documents(),
                target=target_coll_ref,
                batch=batch,
            )


batch = db_client.batch()
read_recursive(
    # list_documents() yields DocumentReference objects, matching what
    # read_recursive iterates over (same as the recursive call above).
    source=db_client.collection("src_collection_name").list_documents(),
    target=db_client.collection("target_collection_name"),
    batch=batch,
)
batch.commit()
Writes are in batches and this saves a lot of time (in my case it finished in half the time compared with set).
The question asks for Python, but in my case I needed to do a recursive deep copy of Firestore docs/collections in NodeJS (TypeScript), using a Document as the starting point of the recursion.
(This is a solution based on the Python script by @cristi.)
Function definition
import {
  CollectionReference,
  DocumentReference,
  DocumentSnapshot,
  QueryDocumentSnapshot,
  WriteBatch,
} from 'firebase-admin/firestore';

interface FirestoreCopyRecursiveContext {
  batchSize: number;
  /**
   * Wrapped Firestore WriteBatch. In firebase-admin@11.0.1, you can't continue
   * using the WriteBatch object after you call WriteBatch.commit().
   *
   * Hence, we need to replace "used up" WriteBatch's with new ones.
   * We also need to reset the count after committing, and because we
   * want all recursive invocations to share the same count + WriteBatch instance,
   * we pass this data via object reference.
   */
  writeBatch: {
    writeBatch: WriteBatch,
    /** Num of items in current batch. Reset to 0 when `commitBatch` commits. */
    count: number;
  };
  /**
   * Function that commits the batch if it reached the limit or is forced to.
   * The WriteBatch instance is automatically replaced with a fresh one
   * if commit did happen.
   */
  commitBatch: (force?: boolean) => Promise<void>;
  /** Callback to insert custom logic / write operations when we encounter a document */
  onDocument?: (
    sourceDoc: QueryDocumentSnapshot | DocumentSnapshot,
    targetDocRef: DocumentReference,
    context: FirestoreCopyRecursiveContext
  ) => unknown;
  /** Callback to insert custom logic / write operations when we encounter a collection */
  onCollection?: (
    sourceDoc: CollectionReference,
    targetDocRef: CollectionReference,
    context: FirestoreCopyRecursiveContext
  ) => unknown;
  logger?: Console['info'];
}

type FirestoreCopyRecursiveOptions = Partial<Omit<FirestoreCopyRecursiveContext, 'commitBatch'>>;

/**
 * Copy all data from one document to another, including
 * all subcollections and documents within them, etc.
 */
export const firestoreCopyDocRecursive = async (
  /** Source Firestore Document Snapshot, descendants of which we want to copy */
  sourceDoc: QueryDocumentSnapshot | DocumentSnapshot,
  /** Destination Firestore Document Ref */
  targetDocRef: DocumentReference,
  options?: FirestoreCopyRecursiveOptions,
) => {
  const batchSize = options?.batchSize ?? 500;
  const writeBatchRef = options?.writeBatch || { writeBatch: firebaseFirestore.batch(), count: 0 };
  const onDocument = options?.onDocument;
  const onCollection = options?.onCollection;
  const logger = options?.logger || console.info;

  const commitBatch = async (force?: boolean) => {
    // Commit batch only if size limit hit or forced
    if (writeBatchRef.count < batchSize && !force) return;
    logger(`Commiting ${writeBatchRef.count} batched operations...`);
    await writeBatchRef.writeBatch.commit();
    // Once we commit the batched data, we have to create another WriteBatch,
    // otherwise we get error:
    // "Cannot modify a WriteBatch that has been committed."
    // See https://dev.to/wceolin/cannot-modify-a-writebatch-that-has-been-committed-265f
    writeBatchRef.writeBatch = firebaseFirestore.batch();
    writeBatchRef.count = 0;
  };

  const context = {
    batchSize,
    writeBatch: writeBatchRef,
    onDocument,
    onCollection,
    commitBatch,
  };

  // Copy the contents of the current docs
  const sourceDocData = sourceDoc.data();
  await writeBatchRef.writeBatch.set(targetDocRef, sourceDocData, { merge: false });
  writeBatchRef.count += 1;
  await commitBatch();

  // Allow to make additional changes to the target document from
  // outside the func after copy command is enqueued / commited.
  await onDocument?.(sourceDoc, targetDocRef, context);
  // And try to commit in case user updated the count but forgot to commit
  await commitBatch();

  // Check for subcollections and docs within them
  for (const sourceSubcoll of await sourceDoc.ref.listCollections()) {
    const targetSubcoll = targetDocRef.collection(sourceSubcoll.id);

    // Allow to make additional changes to the target collection from
    // outside the func after copy command is enqueued / commited.
    await onCollection?.(sourceSubcoll, targetSubcoll, context);
    // And try to commit in case user updated the count but forgot to commit
    await commitBatch();

    for (const sourceSubcollDoc of (await sourceSubcoll.get()).docs) {
      const targetSubcollDocRef = targetSubcoll.doc(sourceSubcollDoc.id);
      await firestoreCopyDocRecursive(sourceSubcollDoc, targetSubcollDocRef, context);
    }
  }

  // Commit all remaining operations
  return commitBatch(true);
};
How to use it
const sourceDocRef = getYourFaveFirestoreDocRef(x);
const sourceDoc = await sourceDocRef.get();
const targetDocRef = getYourFaveFirestoreDocRef(y);

// Copy firestore resources
await firestoreCopyDocRecursive(sourceDoc, targetDocRef, {
  logger,
  // Note: In my case some docs had their doc ID also copied as a field.
  // Because the copied documents get a new doc ID, we need to update
  // those fields too.
  onDocument: async (sourceDoc, targetDocRef, context) => {
    const someDocPattern = /^nameOfCollection\/[^/]+?$/;
    const subcollDocPattern = /^nameOfCollection\/[^/]+?\/nameOfSubcoll\/[^/]+?$/;

    // Update the field that holds the document ID
    if (targetDocRef.path.match(someDocPattern)) {
      const docId = targetDocRef.id;
      context.writeBatch.writeBatch.set(targetDocRef, { docId }, { merge: true });
      context.writeBatch.count += 1;
      await context.commitBatch();
      return;
    }

    // In a subcollection, I had to update multiple ID fields
    if (targetDocRef.path.match(subcollDocPattern)) {
      const docId = targetDocRef.parent.parent?.id;
      const subcolDocId = targetDocRef.id;
      context.writeBatch.writeBatch.set(targetDocRef, { docId, subcolDocId }, { merge: true });
      context.writeBatch.count += 1;
      await context.commitBatch();
      return;
    }
  },
});
