I have created a ROS package that uses custom IMU messages. I would like to change the implementation to use the standard sensor_msgs/msg/Imu instead of the custom one. So first I found the nodes inside the package that subscribe to that custom IMU message. This is that node.
#!/usr/bin/python3
"""Test node: subscribes to the IMU topic and logs yaw/pitch/roll in degrees."""
import numpy as np

from node_registry.decorators import rosnode, register
from driver_ros_msgs.msg import GImu as Imu
from deepx_ros_common.coordinate_converter import quaternion2rpy

# Topic name and QoS history depth for the IMU subscription.
_sub_topic = "/backhoe/imu"
_qos = 1


# NOTE: the decorators below were mangled to '#...' comments in the original
# paste; they must be '@' decorators for the registry framework to see them.
@rosnode.parameter("period", 0.025)
@rosnode
def node_name():
    # Name under which this node is registered.
    return "test_imu"


@rosnode.inject
def yaw_deg():
    # Injected initial value for the node's yaw state, in degrees.
    return 0


@rosnode.subscribe(Imu, _sub_topic, _qos)
def subscribe_cb(msg):  # renamed from 'subscrbe_cb' (typo)
    # GImu wraps a sensor_msgs-style Imu in its 'imu' field; convert the
    # orientation quaternion to (yaw, pitch, roll) radians and log degrees.
    orientation = msg.imu.orientation
    ypr_rad = quaternion2rpy(
        [orientation.w, orientation.x, orientation.y, orientation.z])[0]
    rosnode.logger.info(f"Output ypr: {np.rad2deg(ypr_rad)}")


register()
So is it enough just to change the line _sub_topic = "/backhoe/imu" to _sub_topic = "/sensor_msgs/msg/imu", or is something more needed in this node? (Note that sensor_msgs/msg/Imu is a message type, not a topic name, so it is the subscribed message type — not just the topic string — that has to change.) I also have IMU driver C++ code like this:
#include <tf2/LinearMath/Quaternion.h>
#include <string>
#include <memory>
#include "vectornav_driver/imu_driver.hpp"
namespace vectornav_driver
{
// Constructor: stores the fixed layout of the sensor's ASCII sentence.
// Offsets are character positions into one '$'-prefixed, CRLF-terminated
// line; presumably they match the VectorNav ASCII output format -- confirm
// against the sensor's protocol documentation.
ImuDriver::ImuDriver(
  const std::string node_name, const rclcpp::NodeOptions & options)
: Node(node_name, options),
  header_('$'),      // first character of a valid sentence
  delim_("\r\n"),    // sentence terminator used by async_read_until
  status_start_(26), // [26,30): status bit-field (hex, decoded as uint16)
  status_end_(30),
  yaw_start_(31),    // [31,39): yaw in degrees (converted to radians later)
  yaw_end_(39),
  pitch_start_(40),  // [40,48): pitch in degrees
  pitch_end_(48),
  roll_start_(49),   // [49,57): roll in degrees
  roll_end_(57)
{
}
// Destructor: nothing to do explicitly; smart-pointer members (tcp_,
// updater_, ...) release their resources automatically.
ImuDriver::~ImuDriver()
{
}
void ImuDriver::init()
{
rclcpp::Parameter frame_id;
rclcpp::Parameter ip;
rclcpp::Parameter port;
auto flag_frame_id = get_parameter_or(
"frame_id", frame_id,
rclcpp::Parameter("frame_id", "default_link"));
if (!flag_frame_id) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get frame_id, setting: %s",
frame_id.as_string().c_str());
}
frame_id_ = frame_id.as_string();
auto flag_ip = get_parameter_or(
"ip", ip,
rclcpp::Parameter("ip", "192.168.255.1"));
if (!flag_ip) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get ip, setting: %s",
ip.as_string().c_str());
}
auto flag_port = get_parameter_or(
"port", port,
rclcpp::Parameter("port", 10003));
if (!flag_port) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get port, setting: %d",
port.as_int());
}
auto hardware_id = ip.as_string() + ":" + std::to_string(port.as_int());
rclcpp::Parameter buffer_limit;
auto flag_buffer_limit = get_parameter_or(
"buffer_limit", buffer_limit,
rclcpp::Parameter("buffer_limit", 143));
if (!flag_buffer_limit) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get buffer_limit, setting: %d",
buffer_limit.as_int());
}
buffer_limit_ = buffer_limit.as_int();
rclcpp::Parameter diagnostics_enable;
auto flag_diag_enable = get_parameter_or(
"diagnostics.enable",
diagnostics_enable,
rclcpp::Parameter("diagnostics.enable", false));
if (!flag_diag_enable) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get diagnostics.enable, setting: %d",
diagnostics_enable.as_bool());
}
rclcpp::Parameter min_freq;
auto flag_min_freq = get_parameter_or(
"diagnostics.min_freq",
min_freq,
rclcpp::Parameter("diagnostics.min_freq", 40.0));
if (!flag_min_freq) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get min_freq, setting: %f",
min_freq.as_double());
}
rclcpp::Parameter max_freq;
auto flag_max_freq = get_parameter_or(
"diagnostics.max_freq",
max_freq,
rclcpp::Parameter("diagnostics.max_freq", 45.0));
if (!flag_max_freq) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get max_freq, setting: %f",
max_freq.as_double());
}
rclcpp::Parameter period;
auto flag_period = get_parameter_or(
"diagnostics.period",
period,
rclcpp::Parameter("diagnostics.period", 1.0));
if (!flag_period) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get period, setting: %f",
period.as_double());
}
if (diagnostics_enable.as_bool()) {
min_freq_ = min_freq.as_double();
max_freq_ = max_freq.as_double();
auto freq_param = diagnostic_updater::FrequencyStatusParam(
&min_freq_,
&max_freq_);
freq_status_ = std::make_shared<
diagnostic_updater::FrequencyStatus>(freq_param);
sensor_status_ = std::make_shared<
SensorStatus>();
updater_ = std::make_unique<diagnostic_updater::Updater>(
this, period.as_double());
updater_->add(*freq_status_);
updater_->add(*sensor_status_);
updater_->setHardwareID(hardware_id);
}
rclcpp::Parameter is_unittest;
auto flag_is_unittest = get_parameter_or(
"is_unittest",
is_unittest,
rclcpp::Parameter("is_unittest", false));
if (!flag_is_unittest) {
RCLCPP_WARN_ONCE(
get_logger(),
"Could not get is_unittest, setting: %d",
is_unittest.as_bool());
}
tcp_ = std::make_unique<driver_common_utils::TcpIO>(
ip.as_string(), port.as_int());
if (is_unittest.as_bool()) {
rclcpp::Rate rate(5.0);
rate.sleep();
}
auto flag_init = tcp_->init();
if (flag_init) {
rclcpp::QoS qos(rclcpp::KeepLast(1));
pub_imu_ = create_publisher<GImu>("imu", qos);
boost::asio::async_read_until(
tcp_->getSocket(),
boost::asio::dynamic_buffer(buffer_),
delim_,
boost::bind(
&ImuDriver::receiveHandler,
this,
boost::asio::placeholders::error,
boost::asio::placeholders::bytes_transferred));
tcp_->run();
} else {
RCLCPP_ERROR_ONCE(get_logger(), "Socket init error");
}
}
// Called by boost::asio for every delimiter-terminated chunk. Parses one
// '$'-prefixed sentence into a GImu message, publishes it, then re-arms the
// asynchronous read. Stops (without re-arming) on error or node shutdown.
void ImuDriver::receiveHandler(
  const boost::system::error_code & error, std::size_t bytes_transferred)
{
  if (!rclcpp::ok()) {
    RCLCPP_INFO(get_logger(), "Receive handler: node shutdown called");
    return;
  }
  if (error.failed() && error != boost::asio::error::message_size) {
    RCLCPP_ERROR(
      get_logger(),
      "Receive handler error: %s", error.message().c_str());
    return;
  }
  if (buffer_.size() > buffer_limit_) {
    // Guard against unbounded growth if delimiters stop arriving.
    buffer_.clear();
  }
  if (bytes_transferred && buffer_.size()) {
    // Diagnostics objects are only created when "diagnostics.enable" is true
    // (see init()); without this null check the node crashed when diagnostics
    // were disabled.
    if (freq_status_) {
      freq_status_->tick();
    }
    auto buffer = buffer_.substr(0, bytes_transferred);
    auto get_header = buffer.at(0);
    if (get_header == header_) {
      // Fixed-offset fields of the ASCII sentence (see constructor).
      auto status = decode<uint16_t>(
        buffer, status_start_, status_end_, true);
      auto yaw = toRadian(
        decode<float>(
          buffer, yaw_start_, yaw_end_));
      auto pitch = toRadian(
        decode<float>(
          buffer, pitch_start_, pitch_end_));
      auto roll = toRadian(
        decode<float>(
          buffer, roll_start_, roll_end_));
      tf2::Quaternion quaternion;
      quaternion.setRPY(roll, pitch, yaw);
      GImu gimu;
      gimu.header.frame_id = frame_id_;
      gimu.header.stamp = get_clock()->now();
      // Status bit-field layout: bits 0-1 mode, then individual flags.
      auto mode_bit_1 = boolBitValue(status, 0);
      auto mode_bit_2 = boolBitValue(status, 1);
      gimu.mode = (mode_bit_1 * 1) + (mode_bit_2 * 2);
      gimu.gnss_fix = boolBitValue(status, 2);
      gimu.imu_error = boolBitValue(status, 4);
      gimu.magnetometer_pressure_error = boolBitValue(status, 5);
      gimu.gnss_error = boolBitValue(status, 6);
      gimu.gnss_heading_ins = boolBitValue(status, 8);
      gimu.gnss_compass = boolBitValue(status, 9);
      gimu.imu.orientation.w = quaternion.getW();
      gimu.imu.orientation.x = quaternion.getX();
      gimu.imu.orientation.y = quaternion.getY();
      gimu.imu.orientation.z = quaternion.getZ();
      // Same guard as freq_status_: only present when diagnostics enabled.
      if (sensor_status_) {
        sensor_status_->sensorStatus(
          gimu.mode,
          gimu.gnss_fix,
          gimu.imu_error,
          gimu.magnetometer_pressure_error,
          gimu.gnss_error);
      }
      pub_imu_->publish(gimu);
    }
    buffer_.erase(0, bytes_transferred);
  }
  // Re-arm for the next sentence.
  boost::asio::async_read_until(
    tcp_->getSocket(),
    boost::asio::dynamic_buffer(buffer_),
    delim_,
    boost::bind(
      &ImuDriver::receiveHandler,
      this,
      boost::asio::placeholders::error,
      boost::asio::placeholders::bytes_transferred));
}
} // namespace vectornav_driver
So should any changes be made in the driver too?
Thanks
Hi I’m figuring out how to share Plex library in dart.
I'm helping myself with this working script in python ( it works)
https://gist.github.com/JonnyWong16/f8139216e2748cb367558070c1448636
Unfortunately my code returns an HTTP error 400 (Bad Request).
note: When I run the dart code there are no shared library.
maybe the payload is not correct :|
Thanks for any help.
import 'package:http/http.dart' as http;
/// Minimal Plex client that shares library sections with another user via
/// the plex.tv `shared_servers` endpoint.
class Server {
  String token, ip, port;

  Server(this.ip, this.port, this.token);

  /// POSTs a share request for this server. Throws on any non-200 status.
  share() async {
    var server_id = '00000000000000000000000000000000'; // fake id
    var library_section_ids = '97430074'; // right id, it works in python
    var invited_id = '50819899'; // right id, it works in python
    var headers = {
      'Accept': 'application/json',
      // The body is JSON, so declare it; without this plex.tv may reject
      // the request with 400 -- confirm against the working Python script.
      'Content-Type': 'application/json',
      'X-Plex-Token': token,
    };
    // Valid JSON: objects use braces and string values are quoted. The
    // original string used '[' for objects, left server_id unquoted and
    // was missing the final closing brace -- a guaranteed 400.
    var data = '{"server_id": "$server_id",'
        ' "shared_server": {"library_section_ids": [$library_section_ids],'
        ' "invited_id": $invited_id}}';
    var res = await http.post(
        Uri.parse(
            'https://plex.tv/api/servers/$server_id/shared_servers/'),
        headers: headers,
        body: data);
    if (res.statusCode != 200) {
      throw Exception('http.post error: statusCode= ${res.statusCode}');
    }
    print(res.body);
  }
}
void main(List<String> arguments) async {
  // Placeholder address, port and token -- replace with real values.
  final plexServer = Server('xxx.yyy.xxx.yyy', '32400', '000000000-1111');
  await plexServer.share();
}
The old code has a few bugs and doesn't encode the data correctly.
I solved with 'dio' package and this link helped me a lot https://reqbin.com/
If the user has no shared library this code add a new one
import 'package:dio/dio.dart';
/// Shares Plex library sections using the `dio` HTTP client.
class Server {
  String token;

  Server(this.token);

  /// Sends the shared_servers request; dio encodes [data] as JSON for us.
  test() async {
    // Fill these in before running (the original paste left the
    // initializers empty, which is a syntax error).
    var serverId = ''; // machine identifier of your Plex server
    var librarySectionIds = []; // numeric section ids to share
    var invitedId = ''; // id of the invited user
    var dio = Dio();
    var data = {
      "server_id": serverId,
      "shared_server": {
        "library_section_ids": librarySectionIds,
        "invited_id": invitedId
      }
    };
    dio.options.headers['Accept'] = 'application/json';
    dio.options.headers["authorization"] = "Bearer $token";
    var response = await dio.post(
        'https://plex.tv/api/servers/$serverId/shared_servers/',
        data: data);
    print(response);
  }
}
Code:
const WebSocket = require('ws');
const shaUtil = require('js-sha256');
// Hash helper used to compare client keys against the `passes` table.
// Delegates to js-sha256 on this string's contents.
String.prototype.hashCode = function() {
  return shaUtil(this.toString());
};
// WebSocket endpoint shared by the receiver and all buzzers.
const server = new WebSocket.Server({port:2909}); // Change to another port if you like.
// Hashes of client keys. Generate them using getHash.js and place them here.
// Slot 0 is the single receiver; positive slots are buzzer (sender) clients.
const passes = {
'86cec081a5288000ddb804c24ac70de62a5060e5b4f4068b01a01f9f29dddacc': 0, // Receiver's key.
'c01223ad2ba8cdd184ded788cf6d3469111229cbe6cb3939ce24f471e8f257ca': 1, // Buzzer #1's key.
'5476be1700a54be0572b0066b32b5905986641fa163c5eabd52369eb3a2685cf': 2 // Buzzer #2's key...
};
// Currently connected receiver socket (null while none is logged in).
var receiver = null;
var senders = {1: null, 2: null, 3: null, 4: null}; // You can add more/remove buzzers if you want to.
const lockClients = true; // If it is true, a connected client will not be overriden by another client logging in until it disconnects.
// NOTE(review): canConnect is assigned without var/let/const, making it an
// implicit global -- works here, but breaks under strict mode; confirm intent.
if (lockClients) canConnect = function(id) {
// A slot is free only while no client currently occupies it.
if (id == 0) return receiver === null;
return senders[id] === null;
}
else canConnect = function(id) {
return true;
}
// Forward a buzz from `client` to the receiver, if one is connected.
function processBuzzer(client) {
  if (receiver === null) return;
  receiver.send(`BUZZER ${client.buzzerId}`);
}
// Free the slot held by a departing client and log the event.
function onDisconnect(id) {
  if (id === 0) {
    receiver = null;
    console.log('Receiver has disconnected.');
    return;
  }
  senders[id] = null;
  console.log('Sender #' + id + ' has disconnected.');
}
// Per-connection handler: a client authenticates with "PASSWORD: <key>";
// authenticated buzzers then send "BUZZER" messages forwarded to the receiver.
server.on('connection', function(client) {
// No-op until the client authenticates as a buzzer.
client.sendBuzzer = function() {};
client.on('message', function(message) {
// NOTE(review): in ws v8+ `message` arrives as a Buffer, not a string, so
// startsWith/substring would misbehave -- confirm the ws version used.
if (message.startsWith('PASSWORD: ')) {
// Look up the slot by the SHA-256 hash of the presented key.
let id = passes[message.substring(10).hashCode()];
if (id !== undefined && canConnect(id)) {
// Re-authenticating releases whatever slot this socket held before.
if (client.buzzerId !== undefined) onDisconnect(client.buzzerId);
client.buzzerId = id;
if (id === 0) {
receiver = client;
console.log('Receiver has connected.');
} else {
senders[id] = client;
console.log('Sender #' + id + ' has connected.');
client.sendBuzzer = function() { processBuzzer(this) };
// NOTE(review): only senders get this acknowledgement; the receiver
// (id === 0) never receives CONNECTED SUCCESSFULLY -- confirm intent.
client.send('CONNECTED SUCCESSFULLY');
}
}
}
if (message == 'BUZZER') client.sendBuzzer();
});
client.on('close', function() {
// Free the slot when the socket goes away.
if (client.buzzerId !== undefined) onDisconnect(client.buzzerId);
});
});
console.log('Server is running.');
If I start this with Node.js using "node server.js", it prints "Server is running" — no errors. Now I will connect to this server with receiver.py (a Python file):
serverAddress = 'ws://192.168.1.50:2909'  # The server's address.
clientKey = 'receiver'  # The client key corresponding to the receiver's hash.
buzzerSoundFile = 'buzzer.wav'  # The name/path of the buzzer sound file.

import asyncio
import websockets
from win32com.client import Dispatch
import winsound
from threading import Thread

# COM handle to the running PowerPoint instance; used to invoke a macro.
app = Dispatch('Powerpoint.Application')


def playBuzzerSound():
    """Play the buzzer sound asynchronously so the websocket loop keeps running."""
    global buzzerSoundFile
    winsound.PlaySound(buzzerSoundFile, winsound.SND_FILENAME + winsound.SND_ASYNC)


async def mainFunction():
    """Authenticate with the buzzer server, then react to BUZZER events forever."""
    global serverAddress, clientKey
    async with websockets.connect(serverAddress) as ws:
        # The server hashes the raw key and looks up the receiver slot (0).
        await ws.send('PASSWORD: ' + clientKey)
        while True:
            msg = await ws.recv()
            if msg.startswith('BUZZER '):
                # Run the PowerPoint macro with the buzzer number; play the
                # sound only if the macro returns truthy.
                if app.Run('onBuzzerActivated', int(msg[7:])):
                    Thread(target=playBuzzerSound).start()


# NOTE(review): get_event_loop() is deprecated for this use; asyncio.run()
# is the modern equivalent -- kept as-is to match the original behavior.
asyncio.get_event_loop().run_until_complete(mainFunction())
I got an error 200 like this:
websockets.exceptions.InvalidStatusCode: server rejected WebSocket connection: HTTP 200
What's wrong?
I have a Tensorflow model and I have converted it to ".tflite" but I don't know the way how to implement it on android. I followed the TensorFlow guidelines to implement it in android but since there is no XML code given the TensorFlow website I am struggling to connect it with the front end (XML). I need a clear explanation of how to use my model in android studio using java.
I followed the official instructions given in the TensorFlow website to implement the model in android.
A sample code of how to implement object detection based on tflite model from Tensorflow. I suppose these kinds of answers are not the best answers, but I happened to have a simple example of your exact problem.
Note: it does detect objects and outputs their labels into standard output using Log.d. No boxes or labels will be drawn around detected images.
Download the starter model and labels from here. Put them into the assets folder of your project.
Java
import android.content.pm.PackageManager;
import android.media.Image;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.mlkit.common.model.LocalModel;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.objects.DetectedObject;
import com.google.mlkit.vision.objects.ObjectDetection;
import com.google.mlkit.vision.objects.ObjectDetector;
import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ExecutionException;
public class ActivityExample extends AppCompatActivity {
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
private ObjectDetector objectDetector;
private PreviewView prevView;
private List<String> labels;
private int REQUEST_CODE_PERMISSIONS = 101;
private String[] REQUIRED_PERMISSIONS =
new String[]{"android.permission.CAMERA"};
#Override
protected void onCreate(#Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_fullscreen);
prevView = findViewById(R.id.viewFinder);
prepareObjectDetector();
prepareLabels();
if (allPermissionsGranted()) {
startCamera();
} else {
ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
}
}
private void prepareLabels() {
try {
InputStreamReader reader = new InputStreamReader(getAssets().open("labels_mobilenet_quant_v1_224.txt"));
labels = readLines(reader);
} catch (IOException e) {
e.printStackTrace();
}
}
private List<String> readLines(InputStreamReader reader) {
BufferedReader bufferedReader = new BufferedReader(reader, 8 * 1024);
Iterator<String> iterator = new LinesSequence(bufferedReader);
ArrayList<String> list = new ArrayList<>();
while (iterator.hasNext()) {
list.add(iterator.next());
}
return list;
}
private void prepareObjectDetector() {
CustomObjectDetectorOptions options = new CustomObjectDetectorOptions.Builder(loadModel("mobilenet_v1_1.0_224_quant.tflite"))
.setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
.enableMultipleObjects()
.enableClassification()
.setClassificationConfidenceThreshold(0.5f)
.setMaxPerObjectLabelCount(3)
.build();
objectDetector = ObjectDetection.getClient(options);
}
private LocalModel loadModel(String assetFileName) {
return new LocalModel.Builder()
.setAssetFilePath(assetFileName)
.build();
}
private void startCamera() {
cameraProviderFuture = ProcessCameraProvider.getInstance(this);
cameraProviderFuture.addListener(() -> {
try {
ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
bindPreview(cameraProvider);
} catch (ExecutionException e) {
// No errors need to be handled for this Future.
// This should never be reached.
} catch (InterruptedException e) {
}
}, ContextCompat.getMainExecutor(this));
}
private void bindPreview(ProcessCameraProvider cameraProvider) {
Preview preview = new Preview.Builder().build();
CameraSelector cameraSelector = new CameraSelector.Builder()
.requireLensFacing(CameraSelector.LENS_FACING_BACK)
.build();
ImageAnalysis imageAnalysis = new ImageAnalysis.Builder()
.setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
.build();
YourAnalyzer yourAnalyzer = new YourAnalyzer();
yourAnalyzer.setObjectDetector(objectDetector, labels);
imageAnalysis.setAnalyzer(
ContextCompat.getMainExecutor(this),
yourAnalyzer);
Camera camera =
cameraProvider.bindToLifecycle(
this,
cameraSelector,
preview,
imageAnalysis
);
preview.setSurfaceProvider(prevView.createSurfaceProvider(camera.getCameraInfo()));
}
private Boolean allPermissionsGranted() {
for (String permission : REQUIRED_PERMISSIONS) {
if (ContextCompat.checkSelfPermission(
this,
permission
) != PackageManager.PERMISSION_GRANTED
) {
return false;
}
}
return true;
}
#Override
public void onRequestPermissionsResult(int requestCode, #NonNull String[] permissions, #NonNull int[] grantResults) {
if (requestCode == REQUEST_CODE_PERMISSIONS) {
if (allPermissionsGranted()) {
startCamera();
} else {
Toast.makeText(this, "Permissions not granted by the user.", Toast.LENGTH_SHORT)
.show();
finish();
}
}
}
private static class YourAnalyzer implements ImageAnalysis.Analyzer {
private ObjectDetector objectDetector;
private List<String> labels;
public void setObjectDetector(ObjectDetector objectDetector, List<String> labels) {
this.objectDetector = objectDetector;
this.labels = labels;
}
#Override
#ExperimentalGetImage
public void analyze(#NonNull ImageProxy imageProxy) {
Image mediaImage = imageProxy.getImage();
if (mediaImage != null) {
InputImage image = InputImage.fromMediaImage(
mediaImage,
imageProxy.getImageInfo().getRotationDegrees()
);
objectDetector
.process(image)
.addOnFailureListener(e -> imageProxy.close())
.addOnSuccessListener(detectedObjects -> {
// list of detectedObjects has all the information you need
StringBuilder builder = new StringBuilder();
for (DetectedObject detectedObject : detectedObjects) {
for (DetectedObject.Label label : detectedObject.getLabels()) {
builder.append(labels.get(label.getIndex()));
builder.append("\n");
}
}
Log.d("OBJECTS DETECTED", builder.toString().trim());
imageProxy.close();
});
}
}
}
static class LinesSequence implements Iterator<String> {
private BufferedReader reader;
private String nextValue;
private Boolean done = false;
public LinesSequence(BufferedReader reader) {
this.reader = reader;
}
#Override
public boolean hasNext() {
if (nextValue == null && !done) {
try {
nextValue = reader.readLine();
} catch (IOException e) {
e.printStackTrace();
nextValue = null;
}
if (nextValue == null) done = true;
}
return nextValue != null;
}
#Override
public String next() {
if (!hasNext()) {
throw new NoSuchElementException();
}
String answer = nextValue;
nextValue = null;
return answer;
}
}
}
XML layout
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen camera preview; the id must be @+id (the original paste had
     the '@' mangled to '#', which is not valid resource syntax). -->
<androidx.camera.view.PreviewView
    xmlns:android="http://schemas.android.com/apk/res/android"
    android:id="@+id/viewFinder"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
Gradle file configuration
android {
    ...
    // Keep .tflite assets uncompressed so they can be memory-mapped at runtime.
    aaptOptions {
        noCompress "tflite" // Your model's file extension: "tflite", "lite", etc.
    }
    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}
dependencies {
    ...
    // ML Kit object detection with a custom (bundled) model.
    implementation 'com.google.mlkit:object-detection-custom:16.0.0'
    def camerax_version = "1.0.0-beta03"
    // CameraX core library using camera2 implementation
    implementation "androidx.camera:camera-camera2:$camerax_version"
    // CameraX Lifecycle Library
    implementation "androidx.camera:camera-lifecycle:$camerax_version"
    // CameraX View class
    implementation "androidx.camera:camera-view:1.0.0-alpha10"
}
I am working with a chat application with a simple python localhost server, using NSStream to send and recieve data via network socket connection. App just worked fine in the iPhone application, but not getting stream in the mac application.
My Python Server Code
from twisted.internet.protocol import Factory, Protocol
from twisted.internet import reactor
class MacChat(Protocol):
def connectionMade(self):
print "a client connected"
self.factory.clients.append(self)
print "clients are ", self.factory.clients
def connectionLost(self, reason):
self.factory.clients.remove(self)
def dataReceived(self, data):
a = data.split(':')
print a
if len(a) > 1:
command = a[0]
content = a[1]
msg = ""
if command == "iam":
self.name = content
msg = self.name + " has joined"
elif command == "msg":
msg = self.name + ": " + content
print msg
for c in self.factory.clients:
c.message(msg)
def message(self, message):
self.transport.write(message + '\n')
# Shared factory state: one client list reused by every MacChat instance.
factory = Factory()
factory.clients = []
factory.protocol = MacChat
# Port 80 must match the port the Mac/iPhone clients connect to.
reactor.listenTCP(80, factory)
print "Mac Chat server started"
reactor.run()
Mac
ChatViewController.h
#import <Cocoa/Cocoa.h>

/// Chat window controller; assigning userName opens the server connection
/// (see the custom setter in the .m). The '@' tokens below were mangled to
/// '#' in the original paste and are restored here.
@interface ChatViewController : NSViewController

/// Display name announced to the server.
/// NOTE(review): NSString properties are conventionally `copy`; moot here
/// because the custom setter never stores the value -- confirm intent.
@property (strong, nonatomic) NSString *userName;

@end
ChatViewController.m
#import "ChatViewController.h"

/// Class extension: stream handles, message store and IB outlets.
/// (All '@' tokens and '%@'/@"..." literals were mangled to '#' in the
/// original paste and are restored here.)
@interface ChatViewController () <NSTableViewDataSource, NSTableViewDelegate, NSStreamDelegate>
{
    NSInputStream *inputStream;
    NSOutputStream *outputStream;
    NSMutableArray *messages;
}

@property (weak) IBOutlet NSButton *btnSend;
@property (weak) IBOutlet NSTextField *txtMessage;
@property (weak) IBOutlet NSTableView *tableview;

@end

@implementation ChatViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    // Do view setup here.
}

// NOTE(review): this custom setter never stores userName in an ivar, so the
// property getter will always return nil -- confirm that is intended.
- (void)setUserName:(NSString *)userName
{
    [self initNetworkCommunication];
    // Announce ourselves to the chat server ("iam:<name>").
    NSString *response = [NSString stringWithFormat:@"iam:%@", userName];
    NSData *data = [[NSData alloc] initWithData:[response dataUsingEncoding:NSASCIIStringEncoding]];
    [outputStream write:[data bytes] maxLength:[data length]];
    messages = [[NSMutableArray alloc] init];
}

// Opens paired read/write streams to the chat server on localhost:80
// (must match the Twisted server's reactor.listenTCP port).
- (void)initNetworkCommunication {
    CFReadStreamRef readStream;
    CFWriteStreamRef writeStream;
    CFStreamCreatePairWithSocketToHost(NULL, (CFStringRef)@"localhost", 80, &readStream, &writeStream);
    inputStream = (__bridge_transfer NSInputStream *)readStream;
    // __bridge_transfer (not plain __bridge) so ARC takes ownership of the
    // CFWriteStreamRef; the original plain __bridge leaked the stream.
    outputStream = (__bridge_transfer NSOutputStream *)writeStream;
    inputStream.delegate = self;
    outputStream.delegate = self;
    [inputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    [outputStream scheduleInRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
    [inputStream open];
    [outputStream open];
}

// Sends the text field's contents as "msg:<text>" and clears the field.
- (IBAction)btnAtnSend:(id)sender {
    NSString *response = [NSString stringWithFormat:@"msg:%@", self.txtMessage.stringValue];
    NSData *data = [[NSData alloc] initWithData:[response dataUsingEncoding:NSASCIIStringEncoding]];
    [outputStream write:[data bytes] maxLength:[data length]];
    self.txtMessage.stringValue = @"";
}

#pragma mark NSTableView DataSource/Delegate

- (NSView *)tableView:(NSTableView *)tableView viewForTableColumn:(NSTableColumn *)tableColumn row:(NSInteger)row {
    // Get a new ViewCell
    NSTableCellView *cellView = [tableView makeViewWithIdentifier:tableColumn.identifier owner:self];
    // Since this is a single-column table view, this would not be necessary.
    // But it's a good practice to do it in order to remember it when a table is multicolumn.
    if ([tableColumn.identifier isEqualToString:@"cell"]) {
        NSString *s = (NSString *)[messages objectAtIndex:row];
        cellView.textField.stringValue = s;
    }
    return cellView;
}

- (NSInteger)numberOfRowsInTableView:(NSTableView *)tableView {
    return [messages count];
}

- (CGFloat)tableView:(NSTableView *)tableView heightOfRow:(NSInteger)row
{
    return 30;
}

#pragma mark NSStream Delegate

- (void)stream:(NSStream *)aStream handleEvent:(NSStreamEvent)eventCode
{
    switch (eventCode) {
        case NSStreamEventOpenCompleted:
            NSLog(@"Stream opened");
            break;
        case NSStreamEventHasBytesAvailable:
            // Drain everything currently buffered on the input stream and
            // hand each chunk to messageReceived:.
            if (aStream == inputStream) {
                uint8_t buffer[1024];
                int len;
                while ([inputStream hasBytesAvailable]) {
                    len = (int)[inputStream read:buffer maxLength:sizeof(buffer)];
                    if (len > 0) {
                        NSString *output = [[NSString alloc] initWithBytes:buffer length:len encoding:NSASCIIStringEncoding];
                        if (nil != output) {
                            NSLog(@"server said: %@", output);
                            [self messageReceived:output];
                        }
                    }
                }
            }
            break;
        case NSStreamEventErrorOccurred:
            NSLog(@"Can not connect to the host!");
            break;
        case NSStreamEventEndEncountered:
        {
            [aStream close];
            [aStream removeFromRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
        }
            break;
        default:
            NSLog(@"Unknown event");
    }
}

// Appends the message to the table view and keeps the newest row visible.
- (void)messageReceived:(NSString *)message {
    [messages addObject:message];
    [self.tableview reloadData];
    [self.tableview scrollRowToVisible:messages.count - 1];
}

@end
OK. Looks like this is based on the example from raywenderlich.com.
The example is flawed, as mentioned by "glyph" in the comments section:
This tutorial makes some incorrect assumptions about how data is delivered to an application from a network.
Read glyph's post before building on this code.
For your particular problem, you've made a lot of changes to the original code. At a glance, it's not clear to me that "setUserName" is ever even called.
Without that, nothing else will happen.