I have created an API which is used via a web UI to control my BeagleBone Blue. The board itself controls two DC motors (for driving) and a servo motor (for turning a camera). My API receives the GET request from the UI to control the DC motors, but for some reason the motors are not getting a signal. For the longest time (with the exact same code I am using now) it was working just fine. I used a voltmeter to check whether a signal was being sent to the DC motors, and no power is being sent. I have updated my board and made sure I have the latest version of RCPY. I am stumped as to why this is happening. The strange thing is that the servo motor works perfectly fine (as it did originally).
Here is the code for my API:
import rcpy
import rcpy.motor as motor
import rcpy.servo as srv
import rcpy.clock as clock
import time
from flask import Flask, render_template, request, jsonify, make_response
from datetime import datetime
app = Flask(__name__)
@app.route('/')
def index():
    return render_template('robocar.html')

@app.route('/servo/<duty>')
def move_camera(duty=0.0):
    global servo_duty
    if request.method == "GET":
        servo = srv.Servo(1)
        duty = max(min(1.0, float(duty)), -1.0)
        rcpy.set_state(rcpy.RUNNING)
        clk = clock.Clock(servo, 0.02)
        srv.enable()
        clk.start()
        servo.set(duty)
        time.sleep(1)
        clk.stop()
        srv.disable()
        return _corsify_actual_response(jsonify({"response": "200"}))
    else:
        return _build_cors_prelight_response()

@app.route('/drive/<speed>/<direction>')
def move_car(speed=0.0, direction=0):
    if request.method == "GET":
        motor1_channel = 1
        motor2_channel = 2
        speed = float(speed)
        direction = int(direction)
        wheel_speeds = [speed, -speed]
        if direction == 1:
            motor.set(motor1_channel, wheel_speeds[0])
            motor.set(motor2_channel, wheel_speeds[1])
        elif direction == 4:
            motor.set(motor1_channel, wheel_speeds[0])
            motor.set(motor2_channel, wheel_speeds[0])
            if speed >= 0.1:
                time.sleep(0.5)
                stop_car()
        elif direction == 3:
            motor.set(motor1_channel, wheel_speeds[1])
            motor.set(motor2_channel, wheel_speeds[0])
        elif direction == 2:
            motor.set(motor1_channel, wheel_speeds[1])
            motor.set(motor2_channel, wheel_speeds[1])
            if speed >= 0.1:
                time.sleep(0.5)
                stop_car()
        #time.sleep(20)
        #stop_car()
        return _corsify_actual_response(jsonify({"response": "200"}))
    else:
        return _build_cors_prelight_response()

@app.route('/brake')
def stop_car():
    if request.method == "GET":
        motor1_channel = 1
        motor2_channel = 2
        motor.set_brake(motor1_channel)
        motor.set_brake(motor2_channel)
        return _corsify_actual_response(jsonify({"response": "200"}))
    else:
        return _build_cors_prelight_response()

def _build_cors_prelight_response():
    response = make_response()
    response.headers.add("Access-Control-Allow-Origin", "*")
    response.headers.add('Access-Control-Allow-Headers', "*")
    response.headers.add('Access-Control-Allow-Methods', "*")
    return response

def _corsify_actual_response(response):
    response.headers.add("Access-Control-Allow-Origin", "*")
    return response

if __name__ == '__main__':
    app.run(host='0.0.0.0')
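For what it's worth, here is a minimal standalone sketch (not part of the original post) that exercises the same rcpy calls outside Flask, assuming the same motor channels; running something like this directly on the board shows whether the DC motors respond at all without the web layer involved:

# Hypothetical standalone check, using only calls that already appear in the API code above.
import time
import rcpy
import rcpy.motor as motor

rcpy.set_state(rcpy.RUNNING)   # the servo route sets this, but the drive route never does
motor.set(1, 0.3)              # channel 1 forward at 30% duty
motor.set(2, -0.3)             # channel 2 reversed, mirroring move_car
time.sleep(2)
motor.set_brake(1)
motor.set_brake(2)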
Here is my javascript to make requests to the API:
servo_duty = 0.0

function moveCamera(duty) {
    servo_duty += duty
    servo_duty = Math.max(-1.0, Math.min(1.0, servo_duty))
    $.ajax({
        url: "http://beaglebone.local:5000/servo/" + servo_duty,
        type: "GET",
        cache: false,
        data: {},
        success: () => {
            console.log('ok');
        },
        error: function(result) {
            console.log(result);
        }
    });
}

function moveCar(speed, direction) {
    $.ajax({
        url: "http://beaglebone.local:5000/drive/" + speed + "/" + direction,
        type: "GET",
        cache: false,
        data: {},
        success: () => {
            console.log('ok');
        },
        error: function(result) {
            console.log(result);
        }
    });
}

function stopCar() {
    $.ajax({
        url: "http://beaglebone.local:5000/brake",
        type: "GET",
        cache: false,
        data: {},
        success: () => {
            console.log('ok');
        },
        error: function(result) {
            console.log(result);
        }
    });
}
When a request for the DC motors is sent to the board, I get a 200 response. No errors in the console or on the API side. Any ideas? (Sorry for the wall of code).
I'm trying to compile smart contracts with Python. When I write the code to compile and deploy a simple hello-world type contract, it compiles just fine and the abi and bin outputs are correct, whether viewed together (abi and bin combined) or individually.
When I use the same code to compile a slightly more complex contract (just a basic ERC20 token), which is created by my program to include user inputs such as token name, ticker, etc., the combined abi/bin is correct and viewable and the individual abi is correct and viewable, but the individual bin is just blank, an empty string.
I've tried compiling both from source strings and from (.sol) files using solcx; again, both work for the simple contract but not for the ERC20.
This is the simple contract code which works (in all code parts I have removed my private and public keys and Infura ID):
from solcx import compile_source
import solcx
from web3 import Web3, HTTPProvider
solcx.install_solc()
compiled_solidity = compile_source('''
// SPDX-License-Identifier: MIT
pragma solidity 0.8.17;

contract Greeter {
    function sayHelloWorld() public pure returns (string memory) {
        return "Hello World";
    }
}
''', output_values=['abi', 'bin'])
Infuria = "https://goerli.infura.io/v3/MY INFURIA ID"
contract_id, contract_interface = compiled_solidity.popitem()
print(contract_id)
print('''
''')
print(contract_interface['abi'])
print('''
''')
print(contract_interface['bin'])
w3 = Web3(Web3.HTTPProvider(Infuria))
w3.isConnected()
account_from = {
    'private_key': 'MY PRIVATE KEY',
    'address': 'MY PUBLIC KEY'
}
Greeter = w3.eth.contract(abi=contract_interface['abi'], bytecode=contract_interface['bin'])
construct_txn = Greeter.constructor(5).buildTransaction({
    'from': account_from['address'],
    'nonce': w3.eth.get_transaction_count(account_from['address']),
})
tx_create = w3.eth.account.sign_transaction(construct_txn, account_from['private_key'])
tx_hash = w3.eth.send_raw_transaction(tx_create.rawTransaction)
tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
print(tx_receipt)
The code for the ERC20 contract with the missing individual bin is:
import time
import sys
import json
from web3 import Web3, HTTPProvider
from solcx import compile_source
import solcx
from Header import RedHeader, MyHeader
Helper = "}"
Helper1 = "{"
TokenName = "blank"
Ticker = "blank"
Supply = "blank"
InfuriaID = "MY INFURIA ID"
ContractCode = "blank"
RedHeader(MyHeader)
WalletAddress = input('input wallet address: ')
PrivateKey = input('input wallet private key: ')
Network = input('''select network:
1. Gorli Test Net
2. Ethereum Main Net
: ''')
SetNetwork = "blank"
EthMainnet = "testeth"
GoerliTest = "https://goerli.infura.io/v3/"
if Network == "1":
    SetNetwork = GoerliTest
elif Network == "2":
    SetNetwork = EthMainnet
else:
    print(Network = input('''Incorrect selction, try again
select network:
1. Goerli Test Net
2. Ethereum Main Net
: '''))

TokenType = input(''' Please select the type of token you would like.
1. Basic ERC20 Token
2. ERC20 Token with reflection.
3. ERC20 Token with reflection and burn.
:''')

#basic token
if TokenType == "1":
    TokenName = input("Input token name: ")
    Ticker = input("input token ticker: ")
    Supply = input("input token supply: ")
    ContractCode = f'''pragma solidity ^0.8.7;
import "./ERC20.sol";
contract Greeter is ERC20 {Helper1}
    constructor() ERC20("{TokenName}", "{Ticker}") {Helper1}
        _mint(msg.sender, {Supply} * 10 ** decimals());
    {Helper}
{Helper}'''
    f = open('FullContract.sol', 'r+')
    f.write(f'''pragma solidity ^0.8.7;
import "./ERC20.sol";
contract Greeter is ERC20 {Helper1}
    constructor() ERC20("{TokenName}", "{Ticker}") {Helper1}
        _mint(msg.sender, {Supply} * 10 ** decimals());
    {Helper}
{Helper}''')
elif TokenType == "2":
    print("Coming Soon")
elif TokenType == "3":
    print(SetNetwork)
else:
    print("Invalid Entry")

w3 = Web3(Web3.HTTPProvider(SetNetwork + InfuriaID))
w3.isConnected()

#install_solc('0.8.7')
solcx.install_solc()

#compiled_solidity = compile_source(f'''pragma solidity ^0.8.7;
# import "./ERC20.sol";
# contract Greeter is ERC20 {Helper1}
#     constructor() ERC20("{TokenName}", "{Ticker}") {Helper1}
#         _mint(msg.sender, {Supply} * 10 ** decimals());
#     {Helper}
# {Helper}''', output_values = ['abi', 'bin']
# )

f = open('FullContract.sol', 'r')
compiled_solidity = compile_source(ContractCode, output_values=['abi', 'bin'])

Contract_id, contract_interface = compiled_solidity.popitem()
#abi = ContractInterface['abi']
print(contract_interface['bin'])
#print('''
#''')
print(contract_interface['abi'])
#print('''
#''')
print(compiled_solidity)
#print('''
#''')
#print(contract_interface)

account_from = {
    'private_key': 'MY PRIVATE KEY',
    'address': 'MY PUBLIC KEY'
}

Greeter = w3.eth.contract(abi=contract_interface['abi'], bytecode=contract_interface['bin'])
construct_txn = Greeter.constructor(5).buildTransaction({
    'from': account_from['address'],
    'nonce': w3.eth.get_transaction_count(account_from['address']),
})
tx_create = w3.eth.account.sign_transaction(construct_txn, account_from['private_key'])
tx_hash = w3.eth.send_raw_transaction(tx_create.rawTransaction)
tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
print(tx_receipt)
I just do not understand why the first code works but the second has this issue.
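For illustration only (this is a hypothesis, not something from the original post): when the source imports ERC20.sol, compile_source returns one entry per compiled contract, and popitem() may hand back an interface or base contract whose bin is legitimately empty. A minimal sketch of selecting the Greeter entry explicitly, assuming the usual '<source>:ContractName' keys that compile_source produces:

# Hypothetical check: list every compiled unit, then pick Greeter by name instead of popitem().
for key, unit in compiled_solidity.items():
    print(key, len(unit['bin']))  # interfaces/abstract contracts typically show a bin length of 0

greeter_key = next(k for k in compiled_solidity if k.endswith(':Greeter'))
contract_interface = compiled_solidity[greeter_key]
print(contract_interface['bin'])  # the deployable bytecode, if this hypothesis is right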
Also, this is my first ever attempt at writing a Python program, so I apologise if anything else is not as it should be; I'm just learning.
With the second code I expected the results to be the same as with the first, more basic contract. I've also tried compiling from files but get the same result.
I've been following this tutorial on websockets, and I've run into a bug which I really can't pin down.
So basically, every time I emit a message from a websocket, two messages pop up. I'm not entirely sure why.
I have looked at my code about a dozen times, and it doesn't seem like I am creating a listener inside another listener or anything.
Here is what it looks like when I log all events:
Server listening on ws://localhost:8001
first player started game 2174705662512
second player joined game 2174705662512
X : {'type': 'play', 'row': 0, 'col': 0}
[['X', ' ', ' '], [' ', ' ', ' '], [' ', ' ', ' ']]
X : {'type': 'play', 'row': 0, 'col': 0}
It isn't your turn
first player started game 2174706074912
Notice the events are fired twice for some reason.
Without going in too much depth in my code, here is what is happening:
Server:
async def play(socket, join_key: str, player: str):
    await wait_until_game_full(join_key)
    game, connected = games[join_key]
    for sock in connected:
        if sock == socket:
            continue
        await sock.send(json.dumps({"type": "gameReady"}))  # type: ignore
    async for msg in socket:
        event: Event = json.loads(msg)
        assert event["type"] == "play"
        print(player, ":", event)
        try:
            print(game.play(player, event["row"], event["col"]))
        except RuntimeError as e:
            print(str(e))
            await error(socket, str(e))
There are some things before that pertaining to how the socket joins:
async def handle(socket):
    message = await socket.recv()
    event: Event = json.loads(message)
    assert event["type"] == "init"
    if "join" in event:
        await join(socket, event["join"])
    else:
        await start_game(socket)
join and start_game basically boil down to joining a game and then calling the play function; start_game does the same thing, it just creates the game first.
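For context, here is a rough sketch of what join and start_game might look like in this setup; the original post only summarises them, so the key generation and new_game() constructor below are hypothetical stand-ins, while games and play are the same objects used above:

import secrets

async def start_game(socket):
    # Create a game, register this socket as player "X", and send the join key to the client.
    join_key = secrets.token_hex(8)            # stand-in for however the key is actually generated
    games[join_key] = (new_game(), {socket})   # new_game() is a placeholder for the real constructor
    await socket.send(json.dumps({"type": "init", "join": join_key}))
    await play(socket, join_key, "X")

async def join(socket, join_key):
    # Attach this socket to an existing game as player "O", then enter the same play loop.
    game, connected = games[join_key]
    connected.add(socket)
    await play(socket, join_key, "O")

The client-side JavaScript is: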
const ws = new WebSocket("ws://localhost:8001/");

const queryString = window.location.search;
const urlParams = new URLSearchParams(queryString);

const gamePin = document.getElementById('gamePin')
const curPlayerElement = document.getElementById('curPlayer')
const cells = [...document.getElementsByClassName('cell')]
const board = document.getElementById('board')

let curPlayer = "X"

function playMove(row, col) {
    ws.send(JSON.stringify({
        type: 'play',
        row: row,
        col: col
    }))
    console.log(row, col);
}

function gameReady() {
    curPlayerElement.innerText = "Current player: X"
    board.classList.remove('blur')
    let curCell = 0;
    // Add all the event listeners
    cells.forEach((cell) => {
        let row = Math.floor(curCell / 3)
        let col = curCell % 3;
        cell.addEventListener('click', (e) => {
            playMove(row, col);
        })
        curCell++
    })
}

ws.addEventListener('message', (e) => {
    let msg = JSON.parse(e.data)
    if (msg.type === "init") {
        gamePin.innerText = msg.join
    } if (msg.type === "gameReady") {
        gameReady()
    }
})

ws.addEventListener('open', () => {
    if (urlParams.has('createNewGame')) {
        ws.send(JSON.stringify({ "type": "init" }))
    } else {
        // Join an existing game
        let savedGamePin = sessionStorage.getItem('pin')
        gamePin.innerText = savedGamePin
        ws.send(JSON.stringify({
            "type": "init",
            "join": savedGamePin
        }))
    }
})
Any help would greatly be appreciated.
This is our CentralSystem.py
We have a LIVE charger configured to connect to our websocket, e.g. ws://HostName:Port/ChargerSerialNumber.
As soon as the Charger is connected to our central system, it automatically sends us the following:
Charge point /D5200372001000010101 connected
INFO:ocpp:D5200372001000010101: receive message [2,"530","Heartbeat",{}]
Heartbeat
INFO:ocpp:D5200372001000010101: send [3,"530",{"currentTime":"2022-06-10T10:43:26Z"}]
INFO:ocpp:D5200372001000010101: receive message [2,"531","Heartbeat",{}]
Heartbeat
Issue 1 - We receive the heartbeat every 2 minutes, but we do not see anything for BootNotification and StatusNotification when no vehicle is connected.
However, when the vehicle is connected, we do see a Status Notification and Heartbeats.
Issue 2 - Now, we have a mobile app built using Flutter which should allow us to trigger "Remote Start Transaction". What is the best way to achieve this? Should we connect to the same websocket endpoint and call RemoteStartTransaction?
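One possible pattern for Issue 2 (a sketch, not from the original post; the registry and the start_from_app entry point are made-up names): keep the connected ChargePoint instances in a dictionary keyed by charge point ID inside on_connect, and have whatever API the Flutter app talks to (HTTP, another websocket, a queue) look the charger up there and await its remote_start_transaction() (defined in the code below). The central system then sends the OCPP RemoteStartTransaction call over the charger's already-open connection, rather than the app connecting to the charger endpoint itself:

# Hypothetical registry of live charge point connections (names are illustrative).
connected_chargers = {}

async def on_connect(websocket, path):
    charge_point_id = path.strip('/')
    cp = ChargePoint(charge_point_id, websocket)
    connected_chargers[charge_point_id] = cp
    try:
        await cp.start()
    finally:
        connected_chargers.pop(charge_point_id, None)

async def start_from_app(charge_point_id):
    # Called by whatever endpoint the mobile app hits; reuses the charger's open socket.
    cp = connected_chargers.get(charge_point_id)
    if cp is None:
        raise RuntimeError(f"Charger {charge_point_id} is not connected")
    await cp.remote_start_transaction()

The full CentralSystem.py follows: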
import asyncio
import logging
from asyncio import constants
import websockets
from datetime import datetime, timezone
from ocpp.routing import on
from ocpp.v16 import ChargePoint as cp
from ocpp.v16.enums import Action, RegistrationStatus, RemoteStartStopStatus
import ocpp.v16.enums as enums
from ocpp.v16 import call_result, call
import requests
logging.basicConfig(level=logging.INFO)
class ChargePoint(cp):
    chargingProfile = {}

    @on(Action.RemoteStartTransaction)
    async def on_remote_start(self, id_tag, connector_id):
        print("remotely starting")
        return await self.remote_start_transaction()

    @on(Action.BootNotification)
    def on_boot_notification(self, charge_point_vendor, charge_point_model, **kwargs):
        print("Boot Notification")
        return call_result.BootNotificationPayload(
            # current_time=datetime.utcnow().isoformat() + 'Z',
            current_time=datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') + "Z",
            interval=100,
            status=RegistrationStatus.accepted
        )

    @on(Action.StatusNotification)
    def on_status_notification(self, connector_id, error_code, status, **kwargs):
        print("Status Notification")
        return call_result.StatusNotificationPayload()

    @on(Action.Heartbeat)
    def on_heartbeat(self):
        print("Heartbeat")
        return call_result.HeartbeatPayload(
            current_time=datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') + "Z"
        )

    @on(Action.Authorize)
    def on_authorize(self, id_tag):
        print("Authorize")
        query = {'access_token': 'masterKey'}
        response = requests.get('http://ec2-13-233-102-233.ap-south-1.compute.amazonaws.com:3000/chargersOnboarding/',
                                params=query)
        data = response.json()
        isFound = "false"
        status = ""
        for item in data['rows']:
            if item['RFID'] == id_tag:
                isFound = "true"
                if item['status'] == 0:
                    status = "Accepted"
                else:
                    status = "Expired"
                break
            else:
                continue
        if isFound == "true":
            print("Authorized")
            return call_result.AuthorizePayload(
                id_tag_info={
                    # "expiryDate": "2022-02-04T13:00:00.21Z",
                    "expiryDate": "2023-05-19T13:00:00.21Z",
                    "parentIdTag": id_tag,
                    "status": status
                },
            )
        else:
            print("Not Authorized")
            return call_result.AuthorizePayload(
                id_tag_info={
                    "expiryDate": "",
                    "parentIdTag": id_tag,
                    "status": "Invalid"
                },
            )

    @on(Action.StartTransaction)
    def on_start_transaction(self, connector_id, id_tag, meter_start, timestamp, **kwargs):
        print("START TRANSACTION COMING FROM CHARGER")
        query = {'access_token': 'masterKey'}
        response = requests.get('http://ec2-13-233-102-233.ap-south-1.compute.amazonaws.com:3000/chargersOnboarding/',
                                params=query)
        data = response.json()
        isFound = "false"
        status = ""
        for item in data['rows']:
            if item['RFID'] == id_tag:
                isFound = "true"
                if item['status'] == 0:
                    status = "Accepted"
                else:
                    status = "Expired"
                break
            else:
                continue
        if isFound == "true":
            return call_result.StartTransactionPayload(
                id_tag_info={
                    # "expiryDate": "2022-02-04T13:00:00.21Z",
                    "expiryDate": "2023-05-19T13:00:00.21Z",
                    "parentIdTag": id_tag,
                    "status": status
                },
                transaction_id=int(1)
            )
        else:
            print("Not Authorized")
            return call_result.StartTransactionPayload(
                id_tag_info={
                    "expiryDate": "",
                    "parentIdTag": id_tag,
                    "status": "Invalid"
                },
                transaction_id=int(1)
            )

    @on(Action.StopTransaction)
    def on_stop_transaction(self, transaction_id, timestamp, meter_stop, id_tag, **kwargs):
        query = {'access_token': 'masterKey'}
        response = requests.get('http://ec2-13-233-102-233.ap-south-1.compute.amazonaws.com:3000/chargersOnboarding/',
                                params=query)
        data = response.json()
        isFound = "false"
        status = ""
        for item in data['rows']:
            if item['RFID'] == id_tag:
                isFound = "true"
                if item['status'] == 0:
                    status = "Accepted"
                else:
                    status = "Expired"
                break
            else:
                continue
        if isFound == "true":
            return call_result.StopTransactionPayload(
                id_tag_info={
                    # "expiryDate": "2022-02-04T13:00:00.21Z",
                    "expiryDate": "2023-05-19T13:00:00.21Z",
                    "parentIdTag": id_tag,
                    "status": status
                },
                # transaction_id=int(1)
            )
        else:
            print("Not Authorized")
            return call_result.StopTransactionPayload(
                id_tag_info={
                    "expiryDate": "",
                    "parentIdTag": id_tag,
                    "status": "Invalid"
                },
                transaction_id=int(1)
            )

    @on(Action.MeterValues)
    def on_meter_value(self, **kwargs):
        return call_result.MeterValuesPayload()

    @on(Action.DataTransfer)
    def on_data_transfer(self, vendor_id, message_id, data):
        return call_result.DataTransferPayload(
            status='Accepted'
        )

    @on(Action.ChangeAvailability)
    def on_change_availabilty(self, connector_id, type):
        return call_result.ChangeAvailabilityPayload(
            status='Accepted'
        )

    async def send_limitation(self, limit):
        response = await self.call(call.SetChargingProfilePayload(
            connector_id=0,
            cs_charging_profiles={
                'chargingProfileId': 1,
                'stackLevel': 0,
                'chargingProfilePurpose': enums.ChargingProfilePurposeType.chargepointmaxprofile,
                'chargingProfileKind': enums.ChargingProfileKindType.absolute,
                'chargingSchedule': {
                    'startSchedule': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') + "Z",
                    'chargingRateUnit': enums.ChargingRateUnitType.amps,
                    'chargingSchedulePeriod': [{
                        'startPeriod': 0,
                        'limit': limit
                    }]
                }
            }
        ))
        print("SEND Limitation")
        print(response)

    async def remote_start_transaction(self):
        obj = {
            'chargingProfileId': 1,
            'stackLevel': 0,
            'chargingProfilePurpose': enums.ChargingProfilePurposeType.chargepointmaxprofile,
            'chargingProfileKind': enums.ChargingProfileKindType.absolute,
            'chargingSchedule': {
                'startSchedule': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S') + "Z",
                'chargingRateUnit': enums.ChargingRateUnitType.amps,
                'chargingSchedulePeriod': [{
                    'startPeriod': 0,
                    'limit': 8.0
                }]
            },
        }
        print("REMOTE START!!!")
        request = call.RemoteStartTransactionPayload(
            id_tag='5C1DEA5A',
            charging_profile=obj,
            connector_id=1
        )
        response = await self.call(request)
        print(response)
        if response.status == RemoteStartStopStatus.accepted:
            print("Transaction Started!!!")
        else:
            print("Transaction Failed to Start!!!")
            print(response.status)
        # websockets.send("Transaction Started!!!")

    async def remote_stop_transaction(self):
        print("REMOTE STOP!!!")
        request = call.RemoteStopTransactionPayload(
            transaction_id=1
        )
        response = await self.call(request)
        if response.status == RemoteStartStopStatus.accepted:
            print("Stopping transaction")
        # websockets.send("Transaction Stopped!!!")


async def on_connect(websocket, path):
    charge_point_id = path.strip('/')
    cp = ChargePoint(charge_point_id, websocket)
    try:
        print(f'Charge point {path} connected')
        await asyncio.gather(cp.start())
    except websockets.exceptions.ConnectionClosed:
        print(f"Charge Point {path} disconnected")


async def main():
    server = await websockets.serve(
        on_connect,
        '0.0.0.0',
        9000,
        subprotocols=['ocpp1.6'],
        ping_interval=None,
        ping_timeout=None
    )
    logging.info("Server Started listening to new connections...")
    await server.wait_closed()


if __name__ == '__main__':
    asyncio.run(main())
> async def send_heartbeat(self, interval):
>     request = call.HeartbeatPayload()
>     while True:
>         await self.call(request)
>         await asyncio.sleep(interval)
>
> await asyncio.gather(cp.start(), cp.send_heartbeat(10))
I have been tasked with building a process in Python that extracts data from Elasticsearch, drops it in an Azure Blob, after which Snowflake ingests it. The process runs on Azure Functions: it extracts an index group (like game_name.*) and, for each index in the group, creates a thread to scroll on. I save the last date of each result and on the next run pass it in the range query. I run the process every five minutes and offset the end of the range by 5 minutes (we have a refresh running every 2 minutes). I let the process run for a while and then do a gap analysis by taking a count(*) in both Elasticsearch and Snowflake by hour (or by day), expecting a gap of at most 1%. However, for one index pattern which groups about 127 indexes, the gap is as expected when I run a catch-up job (for a day or more), but as soon as I let it run on the cron job (every 5 min), after a while I get gaps of 6-10%, and only for this index group.
It looks as if the scroller function picks up N documents within the queried range, but then for some reason documents are later added (PUT) with an earlier date. Or I might be wrong and my code is doing something funny. I've talked to our team: they don't cache any docs on the client, the data is synced to a network clock (not the client's), and timestamps are sent in UTC.
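As an aside, the hourly gap analysis on the Elasticsearch side can be expressed as a date_histogram aggregation; this is only an illustrative sketch (the index pattern, dates and es client are placeholders), not code from the actual pipeline:

# Hypothetical hourly count on the Elasticsearch side, to line up against Snowflake.
gap_body = {
    "size": 0,
    "query": {"range": {"baseCtx.date": {"gte": "2022/06/01 00:00:00", "lt": "2022/06/02 00:00:00"}}},
    "aggs": {
        "per_hour": {"date_histogram": {"field": "baseCtx.date", "calendar_interval": "hour"}}
    }
}
res = es.search(index="game_name.*", body=gap_body)
for bucket in res["aggregations"]["per_hour"]["buckets"]:
    print(bucket["key_as_string"], bucket["doc_count"])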
Please see below the query I am using to paginate through Elasticsearch:
def query(searchSize, lastRowDateOffset, endDate, pit, keep_alive):
    body = {
        "size": searchSize,
        "query": {
            "bool": {
                "must": [
                    {
                        "exists": {
                            "field": "baseCtx.date"
                        }
                    },
                    {
                        "range": {
                            "baseCtx.date": {
                                "gt": lastRowDateOffset,
                                "lte": endDate
                            }
                        }
                    }
                ]
            }
        },
        "pit": {
            "id": pit,
            "keep_alive": keep_alive
        },
        "sort": [
            {
                "baseCtx.date": {"order": "asc", "unmapped_type": "long"}
            },
            {
                "_shard_doc": "asc"
            }
        ],
        "track_total_hits": False
    }
    return body


def scroller(pit,
             threadNr,
             index,
             lastRowDateOffset,
             endDate,
             maxThreads,
             es,
             lastRowCount,
             keep_alive="1m",
             searchSize=10000):
    cumulativeResultCount = 0
    iterationResultCount = 0
    data = []
    dataBytes = b''
    lastIndexDate = ''
    startScroll = time.perf_counter()
    while 1:
        if lastRowCount == 0: break
        #if lastRowDateOffset == endDate: lastRowCount = 0; break
        try:
            page = es.search(body=body)
        except: # It is believed that the point in time is getting closed, hence the below opens a new one
            pit = es.open_point_in_time(index=index, keep_alive=keep_alive)['id']
            body = query(searchSize, lastRowDateOffset, endDate, pit, keep_alive)
            page = es.search(body=body)
        pit = page['pit_id']
        data += page['hits']['hits']
        body['pit']['id'] = pit
        if len(data) > 0: body['search_after'] = [x['sort'] for x in page['hits']['hits']][-1]
        cumulativeResultCount += len(page['hits']['hits'])
        iterationResultCount = len(page['hits']['hits'])
        #print(f"This Iteration Result Count: {iterationResultCount} -- Cumulative Results Count: {cumulativeResultCount} -- {time.perf_counter() - startScroll} seconds")
        if iterationResultCount < searchSize: break
        if len(data) > rowsPerMB * maxSizeMB / maxThreads: break
        if time.perf_counter() - startScroll > maxProcessTimeSeconds: break
    if len(data) != 0:
        dataBytes = gzip.compress(bytes(json.dumps(data)[1:-1], encoding='utf-8'))
        lastIndexDate = max([x['_source']['baseCtx']['date'] for x in data])
    response = {
        "pit": pit,
        "index": index,
        "threadNr": threadNr,
        "dataBytes": dataBytes,
        "lastIndexDate": lastIndexDate,
        "cumulativeResultCount": cumulativeResultCount
    }
    return response


def batch(game_name, env='prod', startDate='auto', endDate='auto', writeDate=True, minutesOffset=5):
    es = Elasticsearch(
        esUrl,
        port=9200,
        timeout=300)
    lowerFormat = game_name.lower().replace(" ","_")
    indexGroup = lowerFormat + "*"
    if env == 'dev': lowerFormat, indexGroup = 'dev_' + lowerFormat, 'dev.' + indexGroup
    azFormat = re.sub(r'[^0-9a-zA-Z]+', '-', game_name).lower()
    storageContainerName = azFormat
    curFileName = f"{lowerFormat}_cursors.json"
    curBlobFilePath = f"cursors/{curFileName}"
    compressedTools = [gzip.compress(bytes('[', encoding='utf-8')), gzip.compress(bytes(',', encoding='utf-8')), gzip.compress(bytes(']', encoding='utf-8'))]
    pits = []
    lastRowCounts = []

    # Parameter and state settings
    if os.getenv(f"{lowerFormat}_maxSizeMB") is not None: maxSizeMB = int(os.getenv(f"{lowerFormat}_maxSizeMB"))
    if os.getenv(f"{lowerFormat}_maxThreads") is not None: maxThreads = int(os.getenv(f"{lowerFormat}_maxThreads"))
    if os.getenv(f"{lowerFormat}_maxProcessTimeSeconds") is not None: maxProcessTimeSeconds = int(os.getenv(f"{lowerFormat}_maxProcessTimeSeconds"))

    # Get all indices for the indexGroup
    indicesEs = list(set([(re.findall(r"^.*-", x)[0][:-1] if '-' in x else x) + '*' for x in list(es.indices.get(indexGroup).keys())]))
    indices = [{"indexName": x, "lastOffsetDate": (datetime.datetime.utcnow()-datetime.timedelta(days=5)).strftime("%Y/%m/%d 00:00:00")} for x in indicesEs]

    # Load Cursors
    cursors = getCursors(curBlobFilePath, indices)

    # Offset the current time by -5 minutes to account for the 2-3 min delay in Elasticsearch
    initTime = datetime.datetime.utcnow()
    if endDate == 'auto': endDate = f"{initTime-datetime.timedelta(minutes=minutesOffset):%Y/%m/%d %H:%M:%S}"
    print(f"Less than or Equal to: {endDate}, {keep_alive}")

    # Start Multi-Threading
    while 1:
        dataBytes = []
        dataSize = 0
        start = time.perf_counter()
        if len(pits) == 0: pits = ['' for x in range(len(cursors))]
        if len(lastRowCounts) == 0: lastRowCounts = ['' for x in range(len(cursors))]
        with concurrent.futures.ThreadPoolExecutor(max_workers=len(cursors)) as executor:
            results = [
                executor.submit(
                    scroller,
                    pit,
                    threadNr,
                    x['indexName'],
                    x['lastOffsetDate'] if startDate == 'auto' else startDate,
                    endDate,
                    len(cursors),
                    es,
                    lastRowCount,
                    keep_alive,
                    searchSize) for x, pit, threadNr, lastRowCount in (zip(cursors, pits, list(range(len(cursors))), lastRowCounts))
            ]
            for f in concurrent.futures.as_completed(results):
                if f.result()['lastIndexDate'] != '': cursors[f.result()['threadNr']]['lastOffsetDate'] = f.result()['lastIndexDate']
                pits[f.result()['threadNr']] = f.result()['pit']
                lastRowCounts[f.result()['threadNr']] = f.result()['cumulativeResultCount']
                dataSize += f.result()['cumulativeResultCount']
                if len(f.result()['dataBytes']) > 0: dataBytes.append(f.result()['dataBytes'])
                print(f"Thread {f.result()['threadNr']+1}/{len(cursors)} -- Index {f.result()['index']} -- Results pulled {f.result()['cumulativeResultCount']} -- Cumulative Results: {dataSize} -- Process Time: {round(time.perf_counter()-start, 2)} sec")
        if dataSize == 0: break
        lastRowDateOffsetDT = datetime.datetime.strptime(max([x['lastOffsetDate'] for x in cursors]), '%Y/%m/%d %H:%M:%S')
        outFile = f"elasticsearch/live/{lastRowDateOffsetDT:%Y/%m/%d/%H}/{lowerFormat}_live_{lastRowDateOffsetDT:%Y%m%d%H%M%S}_{datetime.datetime.utcnow():%Y%m%d%H%M%S}.json.gz"
        print(f"Starting compression of {dataSize} rows -- {round(time.perf_counter()-start, 2)} sec")
        dataBytes = compressedTools[0] + compressedTools[1].join(dataBytes) + compressedTools[2]

        # Upload to Blob
        print(f"Comencing to upload data to blob -- {round(time.perf_counter()-start, 2)} sec")
        uploadJsonGzipBlobBytes(outFile, dataBytes, storageContainerName, len(dataBytes))
        print(f"File compiled: {outFile} -- {dataSize} rows -- Process Time: {round(time.perf_counter()-start, 2)} sec\n")

        # Update cursors
        if writeDate: postCursors(curBlobFilePath, cursors)

    # Clean Up
    print("Closing PITs")
    for pit in pits:
        try: es.close_point_in_time({"id": pit})
        except: pass
    print(f"Closing Connection to {esUrl}")
    es.close()
    return


# Start the process
while 1:
    batch("My App")
I think I just need a second pair of eyes to point out where the issue might be in the code. I've tried increasing the minutesOffset argument to 60 (so every 5 minutes it pulls the data from the last run until Now() - 60 minutes) but had the same issue. Please help.
So the "baseCtx.date" is triggered by the client and it seems that in some cases there is a delay between when the event is triggered and when it is available to be searched. We fixed this by using the ingest pipeline as follows:
PUT _ingest/pipeline/indexDate
{
  "description": "Creates a timestamp when a document is initially indexed",
  "version": 1,
  "processors": [
    {
      "set": {
        "field": "indexDate",
        "value": "{{{_ingest.timestamp}}}",
        "tag": "indexDate"
      }
    }
  ]
}
We then set index.default_pipeline to "indexDate" in the template settings. Every month the index name changes (we append the year and month), and this approach gives us a server-side date to scroll on.
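To illustrate the fix (a sketch only, not the exact production query): once every document carries the server-side indexDate, the pagination window from query() above can filter and sort on that field instead of the client-supplied baseCtx.date, so late-arriving documents can no longer land behind the saved cursor:

def query_by_index_date(searchSize, lastRowDateOffset, endDate, pit, keep_alive):
    # Same shape as query() above, but paginating on the server-side indexDate field.
    return {
        "size": searchSize,
        "query": {
            "bool": {
                "must": [
                    {"exists": {"field": "indexDate"}},
                    {"range": {"indexDate": {"gt": lastRowDateOffset, "lte": endDate}}}
                ]
            }
        },
        "pit": {"id": pit, "keep_alive": keep_alive},
        "sort": [
            {"indexDate": {"order": "asc", "unmapped_type": "long"}},
            {"_shard_doc": "asc"}
        ],
        "track_total_hits": False
    }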
I have a file and I want to split it into different files based on the string "async". The output I get is a little messy. I use the word "async" as the key to divide the file, but each generated file ends up with the first line of one function followed by the contents of the previous section. For example, the file is:
'use strict';
const shim = require('fabric-shim');
const util = require('util');

let Chaincode = class {
    async Init(stub) {
        let ret = stub.getFunctionAndParameters();
        console.info(ret);
        console.info('=========== Instantiated Marbles Chaincode ===========');
        return shim.success();
    }

    async Invoke(stub) {
        console.info('Transaction ID: ' + stub.getTxID());
        console.info(util.format('Args: %j', stub.getArgs()));
        let ret = stub.getFunctionAndParameters();
        console.info(ret);
        let method = this[ret.fcn];
        if (!method) {
            console.log('no function of name:' + ret.fcn + ' found');
            throw new Error('Received unknown function ' + ret.fcn + ' invocation');
        }
        try {
            let payload = await method(stub, ret.params, this);
            return shim.success(payload);
        } catch (err) {
            console.log(err);
            return shim.error(err);
        }
    }

    async initMarble(stub, args, thisClass) {
        if (args.length != 4) {
            throw new Error('Incorrect number of arguments. Expecting 4');
        }
        // ==== Input sanitation ====
        console.info('--- start init marble ---')
        if (args[0].lenth <= 0) {
            throw new Error('1st argument must be a non-empty string');
        }
        if (args[1].lenth <= 0) {
            throw new Error('2nd argument must be a non-empty string');
        }
        if (args[2].lenth <= 0) {
            throw new Error('3rd argument must be a non-empty string');
        }
        if (args[3].lenth <= 0) {
            throw new Error('4th argument must be a non-empty string');
        }
        let marbleName = args[0];
        let color = args[1].toLowerCase();
        let owner = args[3].toLowerCase();
        let size = parseInt(args[2]);
        if (typeof size !== 'number') {
            throw new Error('3rd argument must be a numeric string');
        }
        let marbleState = await stub.getState(marbleName);
        if (marbleState.toString()) {
            throw new Error('This marble already exists: ' + marbleName);
        }
        // ==== Create marble object and marshal to JSON ====
        let marble = {};
        marble.docType = 'marble';
        marble.name = marbleName;
        marble.color = color;
        marble.size = size;
        marble.owner = owner;
        await stub.putState(marbleName, Buffer.from(JSON.stringify(marble)));
        let indexName = 'color~name'
        let colorNameIndexKey = await stub.createCompositeKey(indexName, [marble.color, marble.name]);
        console.info(colorNameIndexKey);
        console.info('- end init marble');
    }
I tried this:
import re
import os

filetype = '.js'
result = ''
count = 0
start = 0
name = 'functions'
matchedLine = ''
stringToMatch = 'async'

with open('myjson.js', 'r') as f:
    for x in f.read().split("\n"):
        if stringToMatch in x:
            if (start == 1):
                with open(name + str(count) + '.js', 'w') as opf:
                    matchedLine = x
                    opf.write(matchedLine + '\n' + result)
                    opf.close()
                    result = ''
                print(count)
                count += 1
                matchedLine = ''
            else:
                start = 1
        else:
            if (result == ''):
                result = x
            else:
                result = result + '\n' + x
but the output is a little bit messy
function0.js:
async Invoke(stub) {
'use strict';
const shim = require('fabric-shim');
const util = require('util');
let Chaincode = class {
let ret = stub.getFunctionAndParameters();
console.info(ret);
console.info('=========== Instantiated Marbles Chaincode ===========');
return shim.success();
}
function1.js:
async initMarble(stub, args, thisClass) {
console.info('Transaction ID: ' + stub.getTxID());
console.info(util.format('Args: %j', stub.getArgs()));
let ret = stub.getFunctionAndParameters();
console.info(ret);
let method = this[ret.fcn];
if (!method) {
console.log('no function of name:' + ret.fcn + ' found');
throw new Error('Received unknown function ' + ret.fcn + ' invocation');
}
try {
let payload = await method(stub, ret.params, this);
return shim.success(payload);
} catch (err) {
console.log(err);
return shim.error(err);
}
}
There must be many ways to do this. Here is one:
import re

class Writer:
    def __init__(self):
        self._num = 0
        self._fh = None

    def close(self):
        if self._fh:
            self._fh.close()

    def start_file(self):
        self.close()
        self._fh = open("file{}.js".format(self._num), "w")
        self._num += 1

    def write(self, data):
        if self._fh:
            self._fh.write(data)

writer = Writer()
with open('myjson.js') as f:
    for line in f:
        if re.match(' *async ', line):
            writer.start_file()
        writer.write(line)
writer.close()
If your goal is to separate all the sections that have async code into individual files, one method you might try is to count the curly brackets as they open and close. To do this, you would keep a counter that increments for every { and decrements for every }, for example (not optimized or pretty, just to explain the idea):
brackets = 0
buffer = ""
found_async = False
for line_of_code in code:
    if "async" in line_of_code:
        found_async = True
    if "{" in line_of_code:
        brackets += 1
    if "}" in line_of_code:
        brackets -= 1
    buffer += line_of_code
    if brackets == 0:
        write_buffer_to_file_here
        buffer = ""
As a concept, this will probably not work as is, but should give you an idea of what I'm trying to say.
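Building on that idea, below is a runnable sketch under the same assumptions as the question (input file myjson.js, output names function0.js, function1.js, ...); it starts buffering at each line containing "async" and writes the buffer out once the braces balance again:

# A runnable sketch of the brace-counting idea (file names and the "myjson.js"
# input are assumptions carried over from the question, not a tested solution).
def split_async_blocks(path="myjson.js"):
    depth = 0          # net count of { minus } inside the current async block
    buffer = []        # lines of the block being collected
    count = 0          # suffix for the output file name
    in_block = False
    with open(path) as src:
        for line in src:
            if not in_block and "async" in line:
                in_block = True
            if in_block:
                buffer.append(line)
                depth += line.count("{") - line.count("}")
                if depth == 0 and any("{" in l for l in buffer):
                    # Block closed: write it out and reset for the next async section.
                    with open(f"function{count}.js", "w") as out:
                        out.writelines(buffer)
                    count += 1
                    buffer = []
                    in_block = False

split_async_blocks()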