Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
Loading...
A function that is sometimes suggested as an alternative to PID
---
title: Gooder Title
author: Pito Salas
description: nicer subtitle
order: where does this one appear in order
status: obsolete|new|tophit
date: month-year
---How to use the claw
Step by step tutorial for creating a gazebo world
For advanced users who are trying to fix weird problems
start_x=1
gpu_mem=128
How to use the TKInter package for Ros Tools
import tkinter as tk
import rospy
AprilTags are an alternative to ArUco tags
Arguments and parameters are important tags for roslaunch files that are similar, but not quite the same.
Collection of current knowledge we found in our GPS research
from cv_bridge import CvBridge
def __init__(self):
self.bridge = CvBridge()
def image_callback(self, msg):
# get raw image
# image = self.bridge.imgmsg_to_cv2(msg)
# get compressed image
np_arr = np.frombuffer(msg.data, np.uint8)
img_np = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
image = cv2.cvtColor(img_np, cv2.COLOR_BGR2HSV)sudo apt-get update
sudo apt-get install ros-noetic-camera-calibrationrosrun camera_calibration cameracalibrator.py --size MxN --square
RESULT image:=/CAM/image_raw camera:=CAMyolo detect train model=yolov8n.pt data=traffic_sign.yaml
<?xml version="1.0" ?>
<sdf version="1.6">
<world name="default">
<!-- A ground plane -->
<include>
<uri>model://ground_plane</uri>
</include>
<!-- A global light source -->
<include>
<uri>model://sun</uri>
</include>def turn_function():
t=Twist()
t.angular.z=0.5
cmd_vel.publish(t)
def stop_function():
t=Twist()
t.angular.z=0
cmd_vel.publish(t)rospy.init_node("tkinter_example")
cmd_vel = rospy.Publisher('cmd_vel', Twist, queue_size=1)pip3 install flask-askfrom flask_ask import Ask, statement, question, elicit_slot, confirm_slotexample.launch<arg name="world_name" value="$(find simulation_app)/worlds/example.world"/>example.worldsimulation_app/worldscatkin_ws/src/turtlebot3_simulations/turtlebot3_gazebo/worldsWhen you run a .launch it grabs packages from the wrong place



vcgencmd get_mem gpucd ~/catkin_ws
rm -rf build/ devel/
catkin_make
source devel/setup.bashOpenCV and HSV color:
OpenCV uses HSV to recognize color, but it uses a different scale than normal.
Here is a comparison of scale:
normal use H: 0-360, S: 0-100, V: 0-100
Opencv use H: 0-179, S: 0-255, V: 0-255 <include file="$(find gazebo_ros)/launch/empty_world.launch">
<arg name="world_name" value="$(find {project_name})/worlds/{world_name}.world"/>
<arg name="paused" value="false"/>
<arg name="use_sim_time" value="true"/>
<arg name="gui" value="true"/>
<arg name="headless" value="false"/>
<arg name="debug" value="false"/>
</include>cam0:
camera_model: pinhole
intrinsics: [684.9223702799867, 686.2362451089259, 307.24291147440965, 257.2964284210188]
distortion_model: plumb_bob
distortion_coeffs: [9.39260444e-03, 4.90535732e-01, 1.48962564e-02, 4.68503188e-04, -1.77954077e+00]
resolution: [640, 480]
tagtopic: /cam0def __init__(self):
self.counter = 1
def image_callback(self, msg):
# set frame rate 1/3
if self.counter % 3 != 0:
self.counter += 1
return
else:
self.counter = 1 from std_msgs.msg import Bool #Code to import Bool servo_pub = rospy.Publisher('/servo', Bool, queue_size=1) #Code for publisher servo_pub.publish(True) # Opens claw
servo_pub.publish(False) # Closes clawrosrun prrexamples cvexample.pyrqtrqt_image_viewsudo add-apt-repository ppa:pipewire-debian/pipewire-upstream
sudo apt update
sudo apt install pipewire
sudo apt install libspa-0.2-bluetooth
sudo apt install pipewire-audio-client-libraries
systemctl --user daemon-reload
systemctl --user --now disable pulseaudio.service pulseaudio.socket
systemctl --user mask pulseaudio
systemctl --user --now enable pipewire-media-session.servicepactl infosystemctl --user restart pipewiresystemctl --user restart pipewire-pulsesudo systemctl status bluetooth sudo bluetoothctl
agent on
default-agent
scan on
pair XX:XX:XX:XX
connect XX:XX:XX:XX
trust XX:XX:XX:XXpactl listpactl set-card-profile bluez_card.84_6B_45_98_FD_8E headset-head-unitparec -d bluez_card.84_6B_45_98_FD_8E.headset-head-unit | lame -r -V0 - out.mp3
mpg321 out.mp3pactl list short sources
pactl list short sinkspactl set-default-source <device_name_output>
pactl set-default-sink <device_name_input>from math import atan2
import math
def smartRotate(posex1, posey1, posex2, posey2, theta):
inc_x = posex2 -posex1
inc_y = posey2 -posey1
angle_goal = atan2(inc_y, inc_x)
# if angle_to_goal < 0:
# angle_to_goal = (2* math.pi) + angle_to_goal
print("angle goal = ",angle_goal)
goal_range = theta + math.pi
wrapped = goal_range - (2 * math.pi)
if abs(angle_goal - theta) > 0.1:
print(theta)
print("goal_range = ",goal_range)
if (goal_range) > (2 * math.pi) and (theta < angle_goal or angle_goal < wrapped):
print("go left")
elif (goal_range) < (2 * math.pi) and (theta < angle_goal and angle_goal < goal_range):
print("go left")
else:
print("go right")
else:
print("go straight") smartRotation(0,0,2,2,0)<actor name="actor1">
<pose>0 0 0 0 0 0</pose>
<skin>
<filename>PERSON_MESH.dae</filename>
<scale>1.0</scale>
</skin>
<animation name="NAME">
<filename>ANIMATION_FILE_NAME.dae</filename>
<scale>1.000000</scale>
<interpolate_x>true</interpolate_x>
</animation>
<plugin name="PLUGIN NAME" filename="PLUGIN_FILE_NAME.so">
...
...
...
</plugin>
<script>
<trajectory id="0" type="NAME">
<waypoint>
<time>0</time>
<pose>0 2 0 0 0 -1.57</pose>
</waypoint>
<waypoint>
<time>2</time>
<pose>0 -2 0 0 0 -1.57</pose>
</waypoint>
...
...
...
...
</trajectory>
</script>
</actor><actor name="actor">
...
...
...
</actor><actor name="actor">
<pose>0 0 0 0 0 0</pose>
...
...
</actor><actor name="actor">
<skin>
<filename>moonwalk.dae</filename>
<scale>1.0</scale>
</skin>
</actor><actor name="actor">
<skin>
<filename>moonwalk.dae</filename>
<scale>1.0</scale>
</skin>
<animation name="walking">
<filename>walk.dae</filename>
<scale>1.000000</scale>
</animation>
</actor><actor name="actor">
<skin>
<filename>moonwalk.dae</filename>
<scale>1.0</scale>
</skin>
<animation name="walking">
<filename>walk.dae</filename>
<scale>1.000000</scale>
</animation>
<script>
<trajectory id="0" type="NAME">
<waypoint>
<time>0</time>
<pose>0 2 0 0 0 -1.57</pose>
</waypoint>
<waypoint>
<time>2</time>
<pose>0 -2 0 0 0 -1.57</pose>
</waypoint>
...
...
...
...
</trajectory>
</script>
</actor><actor name="actor">
<skin>
<filename>moonwalk.dae</filename>
<scale>1.0</scale>
</skin>
<animation name="walking">
<filename>walk.dae</filename>
<scale>1.000000</scale>
</animation>
<plugin name="PLUGIN_NAME" filename="NAME_OF_PLUGIN_FILE">
...
</plugin>
</actor><launch>
<node name=" " pkg=" " type=" " output=" "/>
<node name=" " pkg=" " type=" " output=" "/>
</launch><launch>
<node name="MiniScoutMain" pkg="mini_scouter" type="MiniScoutMain.py" output="screen"></node>
<node name="laser_scan" pkg="mini_scouter" type="scan_arouund.py" output="screen"></node>
</launch><?xml version="1.0"?>
<package format="2">
<name>object_sorter</name>
<version>0.0.0</version>sudo apt-get install xterm#!/usr/bin/env python
'''
A module with a GPS node.
GPS2IP: http://www.capsicumdreams.com/gps2ip/
'''
import json
import re
import rospy
import socket
from std_msgs.msg import String
class GPS:
    '''A node which listens to GPS2IP Lite through a socket and publishes a GPS topic.'''

    def __init__(self):
        '''Initialize the publisher and instance variables.'''
        # Instance Variables
        # Host/port of the device running GPS2IP Lite; overridable via private ROS params.
        self.HOST = rospy.get_param('~HOST', '172.20.38.175')
        self.PORT = rospy.get_param('~PORT', 11123)
        # Publisher
        self.publisher = rospy.Publisher('/gps', String, queue_size=1)

    def get_coords(self):
        '''A method to receive the GPS coordinates from GPS2IP Lite.'''
        # Instantiate a client object
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect((self.HOST, self.PORT))
            # The data is received in the RMC data format
            # NOTE(review): a single recv() assumes the whole sentence arrives in one
            # packet — confirm this holds for longer NMEA messages.
            gps_data = s.recv(1024)
            # Transform data into dictionary: split the sentence on ',' and '*'
            # and keep the first five fields.
            gps_keys = ["message_id", "latitude", "ns_indicator", "longitude", "ew_indicator"]
            gps_values = re.split(',|\*', gps_data.decode())[:5]
            gps_dict = dict(zip(gps_keys, gps_values))
            # Cleanse the coordinate data
            for key in ['latitude', 'longitude']:
                # Identify the presence of a negative number indicator
                neg_num = False
                # The GPS2IP application transmits a negative coordinate with a zero prepended
                if gps_dict[key][0] == '0':
                    neg_num = True
                # Transform the longitude and latitude into a format that can be utilized by the front-end web-client
                gps_dict[key] = float(gps_dict[key]) / 100
                # Apply the negative if the clause was triggered
                if neg_num:
                    gps_dict[key] = -1 * gps_dict[key]
            # Publish the decoded GPS data
            self.publisher.publish(json.dumps(gps_dict))
if __name__ == '__main__':
# Initialize a ROS node named GPS
rospy.init_node("gps")
# Initialize a GPS instance with the HOST and PORT
gps_node = GPS()
# Continuously publish coordinated until shut down
while not rospy.is_shutdown():
gps_node.get_coords()root=tk.Tk()
root.wm_title("Test TKinter Window")
root.geometry("250x250") #set size of windowturn_button=tk.Button(
root,
text="turn",
bg="grey",
command=turn_function
)
stop_button=tk.Button(
root,
text="stop",
bg="grey",
command=stop_function
)turn_button.pack()
stop_button.pack()root.mainloop()#!/usr/bin/env python
import tkinter as tk #import tkinter
from geometry_msgs.msg import Twist
import rospy
#Put callbacks here
#Put all standard functions here
#Put functions that are called by pressing GUI buttons here
def turn_function():
    """Button callback: publish a Twist with angular.z = 0.5 so the robot turns."""
    t=Twist() #creates a twist object
    t.angular.z=0.5 #sets the angular velocity of that object so the robot will turn
    cmd_vel.publish(t) #publishes the twist

def stop_function():
    """Button callback: publish a Twist with zero angular velocity to stop the turn."""
    t=Twist()
    t.angular.z=0 #zero angular velocity halts the rotation
    cmd_vel.publish(t) #uses the module-level cmd_vel publisher defined below
#initialize rospy node, publishers, and subscribers
rospy.init_node("tkinter")
cmd_vel = rospy.Publisher('cmd_vel', Twist, queue_size=1) #standard publisher to control movement
#initialize tkinter window
root=tk.Tk() #initialize window
root.wm_title("Test TKinter Window") #set window name
root.geometry("250x250") #set size of window
#create widgets
turn_button=tk.Button( #creates a button
root, #sets the button to be on the root, but this could also be a frame or canvas if you want
text="turn", #The text on the button
bg="green", #The background of the button, tkinter lets you write out color names for many standard colors, but you can also use hex colors or rgb values
command=turn_function #command will tie a previously defined function to your button. You must define this function earlier in the code.
)
stop_button=tk.Button(
root,
text="stop",
bg="red",
command=stop_function
)
#adding the buttons to the window
turn_button.pack() #pack will simply stack each widget on the screen in the order they were packed, but that can be changed with various arguments in the pack method. Please check the tkinter documentation to see more options.
stop_button.pack()
#Tkinter Mainloop
root.mainloop()python xmltotxt.py -xml xml -out out<annotation>
<filename>image-0000016.jpg</filename>
<size>
<width>1920</width>
<height>1080</height>
</size>
<object>
<name>sedan</name>
<bndbox>
<xmin>75</xmin>
<ymin>190</ymin>
<xmax>125</xmax>
<ymax>210</ymax>
</bndbox>
</object>
</annotation>4 0.052083 0.185185 0.026042 0.018519# get a global parameter
rospy.get_param('/global_param_name')
# get a parameter from our parent namespace
rospy.get_param('param_name')
# get a parameter from our private namespace
rospy.get_param('~private_param_name')rospy.set_param('some_numbers', [1., 2., 3., 4.])
rospy.set_param('truth', True)
rospy.set_param('~private_bar', 1+2)<param name="robot_position" command="$(find some_package)/scripts/generate_random_position.py"/><launch>
<arg name="x_pos" default="0.0" />
<arg name="y_pos" default="0.0" />
<arg name="z_pos" default="0.0" />
...<!-- start world and launch gazebo -->
<include file="$(find gazebo_ros)/launch/empty_world.launch">
<arg name="world_name" value="$(find swarmbots)/worlds/$(arg world).world"/>
<arg name="paused" value="true"/>
<arg name="use_sim_time" value="true"/>
<arg name="gui" value="true"/>
<arg name="headless" value="false"/>
<arg name="debug" value="false"/>
</include><launch>
<arg name="followers" />
<arg name="ns" />
<!-- BEGIN robot[#] -->
<group ns="$(arg ns)">
<param name="tf_prefix" value="$(arg ns)" />
<include file="$(find swarmbots)/launch/one_robot.launch">
<arg name="robot_name" value="$(arg ns)" />
<arg name="followers" value="$(arg followers)" />
</include>
</group>
<!-- recursively start robot[#-1] -->
<arg name="new_followers" value="$(eval arg('followers') - 1)" />
<include file="$(find swarmbots)/launch/follower.launch" if="$(eval arg('new_followers') >= 0)">
<arg name="followers" value="$(arg new_followers)" />
<arg name="ns" value="robot$(arg new_followers)" />
</include>
</launch> if="$(eval arg('new_followers') >= 0)"#!/usr/bin/env python
import random, rospy, roslib
rospy.init_node("generate_random_x")
pos_range = float(rospy.get_param('pos_range', 3))
x_pos = random.uniform(-pos_range / 2, pos_range / 2)
rospy.set_param('map_merge/init_pose_x',x_pos)
print(x_pos)<param name="x_pos" command="$(find swarmbots)/src/generate_random_x.py" /><rosparam file="$(find turtlebot3_navigation)/param/local_costmap_params.yaml" command="load" />local_costmap:
global_frame: odom
robot_base_frame: base_footprint
update_frequency: 10.0
publish_frequency: 10.0
transform_tolerance: 0.5
static_map: false
rolling_window: true
width: 3
height: 3
resolution: 0.05app = Flask(__name__)
ask = Ask(app, '[endpoint]')ask = Ask(app, '/commands')@ask.launch
def start_skill():
return question('hello, what you need?)@ask.intent('bring_items')
def bring_items(): slots = request.get_json()['request']['intent']['slots']if 'value' not in slots['items'].keys()
return elicit_slot('items', 'what do you want me to bring?)elicit_slot('items','those are too heavy, give me something else to bring')@ask.intent('AMAZON.FallbackIntent')@ask.intent('AMAZON.CancelIntent')#!/usr/bin/env python
import rospy
from sensor_msgs.msg import CompressedImage
from cv_bridge import CvBridge
def image_callback(msg):
    """Handle an incoming CompressedImage: decode, process, re-encode, republish."""
    # Convert the compressed image message to a cv2 image
    bridge = CvBridge()
    cv2_image = bridge.compressed_imgmsg_to_cv2(msg, desired_encoding='passthrough')
    # Process the image (e.g., resize, blur, etc.)
    processed_image = process_image(cv2_image)
    # Convert the processed cv2 image back to a compressed image message
    compressed_image_msg = bridge.cv2_to_compressed_imgmsg(processed_image)
    # Publish the processed image on the new topic
    # NOTE(review): processed_image_pub is the module-level publisher created in __main__.
    processed_image_pub.publish(compressed_image_msg)
def process_image(cv2_image):
    """Apply image processing to *cv2_image* and return the result.

    Placeholder: currently a pass-through. Insert OpenCV operations here,
    e.g. ``cv2.resize(cv2_image, (new_width, new_height))``.
    """
    processed = cv2_image  # no-op for now; replace with real processing
    return processed
if __name__ == '__main__':
rospy.init_node('image_processing_node')
rospy.Subscriber('/robot/camera_topic', CompressedImage, image_callback)
processed_image_pub = rospy.Publisher('/processed_image_topic', CompressedImage, queue_size=1)
rospy.spin()sudo apt install curl
curl 'https://raw.githubusercontent.com/Interbotix/interbotix_ros_manipulators/main/interbotix_ros_xsarms/install/amd64/xsarm_amd64_install.sh' > xsarm_amd64_install.sh
chmod +x xsarm_amd64_install.sh
./xsarm_amd64_install.sh -d noetic$ diskutil list
/dev/disk1 (synthesized):
#: TYPE NAME SIZE IDENTIFIER
0: APFS Container Scheme - +500.0 GB disk1
Physical Store disk0s2
1: APFS Volume Macintosh HD — Data 396.0 GB disk1s1
2: APFS Volume Preboot 81.9 MB disk1s2
3: APFS Volume Recovery 528.5 MB disk1s3
4: APFS Volume VM 4.3 GB disk1s4
5: APFS Volume Macintosh HD 11.0 GB disk1s5
/dev/disk4 (external, physical):
#: TYPE NAME SIZE IDENTIFIER
0: FDisk_partition_scheme *31.9 GB disk4
1: Windows_FAT_32 boot 268.4 MB disk4s1
2: Linux 31.6 GB disk4s2<?xml version='1.0'?>
<sdf version="1.4">
<model name="my_model">
<pose>0 0 0.5 0 0 0</pose>
<static>true</static>
<link name="link">
<inertial>
<mass>1.0</mass>
<inertia> <!-- inertias are tricky to compute -->
<!-- http://gazebosim.org/tutorials?tut=inertia&cat=build_robot -->
<ixx>0.083</ixx> <!-- for a box: ixx = 0.083 * mass * (y*y + z*z) -->
<ixy>0.0</ixy> <!-- for a box: ixy = 0 -->
<ixz>0.0</ixz> <!-- for a box: ixz = 0 -->
<iyy>0.083</iyy> <!-- for a box: iyy = 0.083 * mass * (x*x + z*z) -->
<iyz>0.0</iyz> <!-- for a box: iyz = 0 -->
<izz>0.083</izz> <!-- for a box: izz = 0.083 * mass * (x*x + y*y) -->
</inertia>
</inertial>
<collision name="collision">
<geometry>
<box>
<size>1 1 1</size>
</box>
</geometry>
</collision>
<visual name="visual">
<geometry>
<box>
<size>1 1 1</size>
</box>
</geometry>
</visual>
</link>
</model>
</sdf><geometry>
<mesh filename="package://package_name/model_path/model.stl" scale="0.01 0.01 0.01"/>
</geometry><include>
<uri>model://my_model</uri>
</include>export GAZEBO_MODEL_PATH=~/catkin_ws/src/package_name/model_path/bot = InterbotixManipulatorXS("px100", "arm", "gripper")
theta = math.atan(x/y)
dr = y / (math.cos(theta)) - current_r
dz = z - current_z
bot.arm.set_single_joint_position("waist", theta)
bot.arm.set_ee_cartesian_trajectory(r = dr, z = dz)from cv_bridge import CvBridge
def __init__(self):
self.bridge = CvBridge()
def image_callback(self, msg):
# get raw image
# image = self.bridge.imgmsg_to_cv2(msg)
# get compressed image
np_arr = np.frombuffer(msg.data, np.uint8)
img_np = cv2.imdecode(np_arr, cv2.IMREAD_COLOR)
image = cv2.cvtColor(img_np, cv2.COLOR_BGR2HSV)This FAQ documents specific instructions to define a new Message
An overview of how ROSBridge and ROSLIBJS works/utilized for command control
<arg name="model" default="burger" doc="model type [burger, waffle, waffle_pi]"/>
<arg name="seeker_pos_x" default="0"/>
<arg name="seeker_pos_y" default=" -0.5"/>
<arg name="seeker_pos_z" default=" 0.0"/>
<arg name="seeker_yaw" default="0"/>
<arg name="hider_pos_x" default="0"/>
<arg name="hider_pos_y" default="0.5"/>
<arg name="hider_pos_z" default=" 0.0"/>
<arg name="hider_yaw" default="0" />cd ${your_package_name}
mkdir launch
mkdir worlds
mkdir models <launch>
<arg name="model" default="$(env TURTLEBOT3_MODEL)" doc="model type [burger, waffle, waffle_pi]"/>
<arg name="x_pos" default="0.0"/>
<arg name="y_pos" default="0.0"/>
<arg name="z_pos" default="0.0"/>
<include file="$(find gazebo_ros)/launch/empty_world.launch">
<arg name="world_name" value="$(find ${your_package_name})/worlds/${your_world_file_name}.world"/>
<arg name="paused" value="false"/>
<arg name="use_sim_time" value="true"/>
<arg name="gui" value="true"/>
<arg name="headless" value="false"/>
<arg name="debug" value="false"/>
</include>
<param name="robot_description" command="$(find xacro)/xacro --inorder $(find turtlebot3_description)/urdf/turtlebot3_$(arg model).urdf.xacro" />
<node name="spawn_urdf" pkg="gazebo_ros" type="spawn_model" args="-urdf -model turtlebot3_$(arg model) -x $(arg x_pos) -y $(arg y_pos) -z $(arg z_pos) -param robot_description" />
</launch>def img_callback(self, msg):
# Canny Edge Detection
# Blurs image and find intensity gradients
img = self.bridge.imgmsg_to_cv2(msg, 'bgr8')
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
kernelSize = 31grayBlur = cv2.GaussianBlur(gray, (kernelSize, kernelSize), 0)lowEnd = 30
highEnd = 100
edges = cv2.Canny(grayBlur, lowEnd, highEnd)
# Publish for use in finding centroid
self.img_pub.publish(self.bridge.cv2_to_imgmsg(edges))sudo apt install ros-$ROS_DISTRO-rgbd-launch ros-$ROS_DISTRO-libuvc ros-$ROS_DISTRO-libuvc-camera ros-$ROS_DISTRO-libuvc-rossudo apt install libuvc
sudo apt install ros-*-rgbd-launchroscd astra_camera
chmod 777 /scripts/create_udev_rules
./scripts/create_udev_rulesexport OPENCR_PORT=/dev/ttyACM0
export OPENCR_MODEL=burger_noetic# or waffle_noetic if you have a waffle tb3
rm -rf ./opencr_update.tar.bz2
wget https://github.com/ROBOTIS-GIT/OpenCR-Binaries/raw/master/turtlebot3/ROS1/latest/opencr_update.tar.bz2
tar -xvf opencr_update.tar.bz2
cd ./opencr_update
./update.sh $OPENCR_PORT $OPENCR_MODEL.opencrcd ~/catkin_ws/src
git clone https://github.com/bosch-ros-pkg/usb_cam.git
cd ..
catkin_make
source ~/catkin_ws/devel/setup.bash
bringup
roslaunch astra_camera astra.launch
rosrun usb_cam usb_cam_node/camera/depth/camera_info
/camera/depth/image_raw
/camera/depth/points
/camera/depth_registered/points
/camera/extrinsic/depth_to_color
/camera/ir/camera_info
/camera/ir/image_raw
/camera/reset_deviceimport socket
host = <enter host IP here>
port = 5000
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.bind(('', port))
s.listen(1)
c, addr = s.accept()
print("CONNECTION FROM:", str(addr))
c.send(b"HELLO, How are you ? Welcome to Akash hacking World")
msg = "Bye.............."
c.send(msg.encode())
c.close()
import socket
host = <Same IP>
port = 5000
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', port))
msg = s.recv(1024)
while msg:
print('Received:' + msg.decode())
msg = s.recv(1024)
s.close()#!/usr/bin/env python
import os
from socket import *
import rospy
from std_msgs.msg import String
from nav_msgs.msg import Odometry
# Because of transformations
import tf_conversions
import tf2_ros
import geometry_msgs.msg
import math
from geometry_msgs.msg import PoseWithCovarianceStamped
from tf.transformations import euler_from_quaternion, quaternion_from_euler
rospy.init_node("sender")
#100.71.173.127
#100.74.41.103
host = "100.71.173.127" # set to IP address of target computer
port = 13000
addr = (host, port)
UDPSock = socket(AF_INET, SOCK_DGRAM)
roll = 0.0
pitch = 0.0
yaw = 0.0
def get_rotation (msg):
    """AMCL pose callback: log the pose and send "x y" over UDP to the peer machine."""
    if msg is not None:
        global roll, pitch, yaw
        orientation_q = msg.pose.pose.orientation
        orientation_list = [orientation_q.x, orientation_q.y, orientation_q.z, orientation_q.w]
        # Convert the quaternion to Euler angles; only yaw is used (for the log line).
        (roll, pitch, yaw) = euler_from_quaternion (orientation_list)
        print('X =',msg.pose.pose.position.x, 'Y =',msg.pose.pose.position.y, 'Yaw =',math.degrees(yaw))
        # Space-separated "x y" payload, matching what the receiver script splits on.
        mess=str(msg.pose.pose.position.x)+" "+str(msg.pose.pose.position.y)
        data = bytes(str(mess), 'utf-8')
        # UDPSock/addr are module-level: socket created and target set at script top.
        UDPSock.sendto(data, addr)
sub = rospy.Subscriber ('/amcl_pose', PoseWithCovarianceStamped, get_rotation) # geometry_msgs/PoseWithCovariance pose
while not rospy.is_shutdown():
hi = "r"#!/usr/bin/env python
import os
from socket import *
import rospy
from std_msgs.msg import String
from nav_msgs.msg import Odometry
import json
from std_msgs.msg import Float64MultiArray
rospy.init_node("receiver")
mypub = rospy.Publisher('/other_odom', Float64MultiArray,queue_size = 10)
host = "100.74.41.103"
port = 13000
buf = 1024
addr = (host, port)
UDPSock = socket(AF_INET, SOCK_DGRAM)
UDPSock.bind(addr)
while not rospy.is_shutdown():
(data, addr) = UDPSock.recvfrom(buf)
data=data.decode('utf-8')
data=data.split()
x=data[0]
y=data[1]
x=float(x)
y=float(y)
my_msg = Float64MultiArray()
d=[x, y, 67.654236]
my_msg.data = d
mypub.publish(my_msg)
if data == "exit":
break
UDPSock.close()
os._exit(0)double error = target_speed - get_wheel_speed(); // assume this function has been written and returns motor speed in RPMstd_msgs/String[] list
std_msgs/Int16 int <group ns="seeker">
<param name="robot_description" command="$(find xacro)/xacro.py $(find turtlebot3_description)/urdf/turtlebot3_$(arg model).urdf.xacro" />
<node pkg="robot_state_publisher" type="robot_state_publisher" name="robot_state_publisher" output="screen">
<param name="publish_frequency" type="double" value="50.0" />
<param name="tf_prefix" value="seeker" />
</node>
<node name="spawn_urdf" pkg="gazebo_ros" type="spawn_model" args="-urdf -model seeker -x $(arg seeker_pos_x) -y $(arg seeker_pos_y) -z $(arg seeker_pos_z) -Y $(arg seeker_yaw) -param robot_description" />
</group>
<group ns="hider">
<param name="robot_description" command="$(find xacro)/xacro.py $(find turtlebot3_description)/urdf/turtlebot3_$(arg model).urdf.xacro" />
<node pkg="robot_state_publisher" type="robot_state_publisher" name="robot_state_publisher" output="screen">
<param name="publish_frequency" type="double" value="50.0" />
<param name="tf_prefix" value="hider" />
</node>
<node name="spawn_urdf" pkg="gazebo_ros" type="spawn_model" args="-urdf -model hider -x $(arg hider_pos_x) -y $(arg hider_pos_y) -z $(arg hider_pos_z) -Y $(arg hider_yaw) -param robot_description" />
</group>





def __init__(self):
self.counter = 1
def image_callback(self, msg):
# set frame rate 1/3
if self.counter % 3 != 0:
self.counter += 1
return
else:
self.counter = 1def preprocessor(all_ranges):
ranges = [float[(‘inf’)] * 20
ranges _index = 0
index = -9
sum = 0
batch = 0
actual = 0
for i in range(360):
curr = all_ranges[index]
if curr != float(‘inf” and not isnan(curr):
sum += curr
actual += 1
batch += 1
index += 1
if batch == 18:
if actual != 0:
ranges[ranges_index] = sum/actual
ranges_index += 1
sum = 0
batch = 0
actual = 0
return ranges<joint name="camera_joint" type="fixed">
<origin xyz="0 0 ${1+base_height}" rpy="0 ${-cam_pitch} 0" />
<parent link="base_link"/>
<child link="camera_link" />
</joint>sudo chmod a+rw /dev/ttyACM0rosrun rosserial_python serial_node.py _port="/dev/ttyACM0" _baud=57600double p = error * Ki;double i;
...
i += Ki * error * change_in_time();double d = Kd * (error - pervious_error) / change_in_time();
previous_error = error;double pid = p + i + d;
command = previous_command + pid;
previous_command = command;Bus 001 Device 003: ID 0403:6014 Future Technology Devices International, Ltd FT232H Single HS USB-UART/FIFO ICsudo apt install python3-colcon-common-extensionssudo apt install python3-rosdepsudo rosdep init
rosdep updatesudo apt install curl
curl 'https://raw.githubusercontent.com/Interbotix/interbotix_ros_manipulators/main/interbotix_ros_xsarms/install/amd64/xsarm_amd64_install.sh' > xsarm_amd64_install.sh
chmod +x xsarm_amd64_install.sh
./xsarm_amd64_install.sh -d humbleros2 launch interbotix_xsarm_control xsarm_control.launch.py robot_model:=px100ros2 service call /px100/torque_enable interbotix_xs_msgs/srv/TorqueEnable "{cmd_type: 'group', name: 'all', enable: false}" if len(contours) != 0:
# draw in blue the contours that were founded
cv2.drawContours(output, contours, -1, 255, 3)
# find the biggest countour (c) by the area
c = max(contours, key = cv2.contourArea)
x,y,w,h = cv2.boundingRect(c)
cx = x + (w/2)
cy = y + (h/2)
# draw the biggest contour (c) in green
cv2.rectangle(output,(x,y),(x+w,y+h),(0,255,0),2)

# BUG FIX: the original tested `> .5` first, which made the `> .6` branch
# unreachable, and misspelled the variable as `y_tranfrom` (a NameError).
# Larger threshold is now checked first; intent assumed — TODO confirm.
if (y_transform > .6):
    y_transform += .09
elif (y_transform > .5):
    y_transform += .05

angular_states_prob = { -.5: .5, -.45: .5, -.4: .5, -.35: .5, -.3: .5, -.25: .5, -.2: .5, -.15: .5, -.1: .5, -.05: .5, 0 : .5, .05 : .5, .1 : .5, .15 : .5, .2: .5, .25: .5, .3: .5, .35: .5, .4: .5, .45: .5, .5: .5 }
linear_states_prob = { 0.1 : .5, 0.25 : .5, 0.4 : .5 }moments = cv2.moments(mask_yellow)
if moments['m00'] > 0:
cx = int(moments['m10']/moments['m00'])
cy = int(moments['m01']/moments['m00'])angle = (float(cx)/1570.0 - 0.5) * -1.0for value in angular_states_prob.keys():
angleSum += angular_states_prob[value]
if abs(value - angle) < 0.05:
angular_states_prob[value] = angular_states_prob[value]*5
elif abs(value - angle) < 0.1:
angular_states_prob[value] = angular_states_prob[value]*2#!/usr/bin/env python
import rospy
from sensor_msgs.msg import CompressedImage
from cv_bridge import CvBridge
def image_callback(msg):
    """Decode a CompressedImage, run process_image on it, and republish it compressed."""
    # Convert the compressed image message to a cv2 image
    bridge = CvBridge()
    cv2_image = bridge.compressed_imgmsg_to_cv2(msg, desired_encoding='passthrough')
    # Process the image (e.g., resize, blur, etc.)
    processed_image = process_image(cv2_image)
    # Convert the processed cv2 image back to a compressed image message
    compressed_image_msg = bridge.cv2_to_compressed_imgmsg(processed_image)
    # Publish the processed image on the new topic
    # NOTE(review): processed_image_pub is the module-level publisher from __main__.
    processed_image_pub.publish(compressed_image_msg)
def process_image(cv2_image):
    """Return the (optionally transformed) image.

    This is a stub that hands the input back unchanged; swap in real
    OpenCV work such as resizing or blurring when needed
    (e.g. ``cv2.resize(cv2_image, (new_width, new_height))``).
    """
    return cv2_image
if __name__ == '__main__':
rospy.init_node('image_processing_node')
rospy.Subscriber('/robot/camera_topic', CompressedImage, image_callback)
processed_image_pub = rospy.Publisher('/processed_image_topic', CompressedImage, queue_size=1)
rospy.spin()def averager(direction_ranges):
real = 0
sum = 0
for i in ranges:
if i != float(‘inf’) and not isnan(i):
real += 1
sum += 1
return float(‘inf’) if real == 0 else sum/realdef cardinal_directions(ranges):
directions = {“Front”: float(‘inf’), “Back”: float(‘inf’), “Left”: float(‘inf’), “Right”: float(‘inf’)}
plus_minus = 9
Front = ranges[-plus_minus:] + ranges[:plus_minus]
Backward = ranges[180 - plus_minus:180+plus_minus]
Left = ranges[90 - plus_minus:90+plus_minus]
Right = ranges[270-plus_minus:270 + plus_minus]
directions_before_processed = {“”Front”: Front, “Back”: Back, “Left”: Left, “Right”: Right}
for direction, data in directions_before_processed:
directions[direction] = averager(data)
return directionscatkin_package(
CATKIN_DEPENDS message_runtime
)from package_name.msg import message_name as message_namefrom std_msgs.msg import Stringself.detect_intruder_pub = rospy.Publisher('/see_intruder', see_intruder, queue_size=1)
self.detect_intruder_sub=rospy.Subscriber('/see_intruder', see_intruder,self.see_intruder_callback)
msg.list[msg.int.data].datasource ~/catkin_ws/devel/setup.bashsudo apt install ros-noetic-rosbridge-server<script src="https://static.robotwebtools.org/roslibjs/current/roslib.min.js"></script><script src="PATH-TO-DOWNLOADED-SCRIPT"></script>import React, { Component } from 'react';
import { Alert } from 'react-bootstrap';
class ROSConnect extends Component {
constructor() {
super()
this.state = { connected: false, ros: null }
}
// run the function as soon as the page renders
componentDidMount() {
this.init_connection()
}
// a function to connect to the robot using ROSLIBJS
init_connection() {
this.state.ros = new window.ROSLIB.Ros()
this.state.ros.on("connection", () => {
this.setState({connected: true})
})
this.state.ros.on("close", () => {
this.setState({connected: false})
// try to reconnect to rosbridge every 3 seconds
setTimeout(() => {
try{
// ip address of the rosbridge server and port
this.state.ros.connect('ws://127.0.0.1:9090')
}catch (error) {
console.log("connection error:", error);
}
// if the robot disconnects try to reconnect every 3 seconds (1000 ms = 1 second)
}, 3000);
})
try{
// connect to rosbridge using websocket
this.state.ros.connect('ws://127.0.0.1:9090')
}catch (error) {
console.log("connection error:", error);
}
}
render() {
return (
// a Alert component from react-bootstrap showing if the robot is connected or not
<div>
<Alert className='text-center m-3' variant={this.state.connected ? "success" : "danger"}>
{this.state.connected ? "Robot Connected" : "Robot Disconnected"}
</Alert>
</div>
);
}
}
export default ROSConnect;sudo docker --versionDocker version 25.0.3, build 4debf41docker run -p 6080:80 --security-opt seccomp=unconfined --shm-size=512m tiryoh/ros2-desktop-vnc:irondocker ps -adocker rm NAME$ rostopic pub -r 10 /cmd_vel geometry_msgs/Twist '{linear: {x: 0.1, y: 0.0, z: 0.0}, angular: {x: 0.0,y: 0.0,z: 0.0}}'$ cd .\Downloads\
$ tar -xvkf .\pa2.tar.gz git add . //Alternatively, you can specify a folder or file here.
git commit -m "some commit message"
git pull // updates your local to take any new changes.
git push //pushes your changes to origin/github. import rospy
from geometry_msgs.msg import TransformStamped, Twist
from std_msgs.msg import Bool, String # if you get errors for creating your own publisher, see if you have the right type here.
class Classname:
    """Skeleton rospy node: publishes Twist commands on /cmd_vel and
    tracks the latest /state message.

    Fixed: the original began with `Class Classname:` — `Class` is not a
    Python keyword, so the snippet was a SyntaxError.
    """

    def __init__(self):
        rospy.init_node('main')
        self.linear_speed = 0.0
        self.rate = rospy.Rate(30)  # reminder: this must be self. too.
        self.twist = Twist()
        # CORE pub subs ====
        self.cmd_pub = rospy.Publisher("/cmd_vel", Twist, queue_size=1)
        self.state_sub = rospy.Subscriber("/state", String, self.state_cb)  # self.state_cb

    def state_cb(self, msg):
        # when accessing the information, you most likely will need to access .data
        self.state = msg.data

    def funcname(self):
        # This is where you might see the error if you forgot self.
        self.linear_speed = 0.3
        self.twist.linear.x = self.linear_speed
        self.twist.angular.z = 0.0
        self.cmd_pub.publish(self.twist)

    def run(self):
        """Publish at the node rate until shutdown."""
        while not rospy.is_shutdown():
            self.funcname()
            self.rate.sleep()


if __name__ == '__main__':
    c = Classname()
    c.run()
# EOFlet ros_config = {
ROSBRIDGE_SERVER_IP: "127.0.0.1",
ROSBRIDGE_SERVER_PORT: "9090",
RECONNECTION_TIMEOUT: 3000,
CHECK_IMAGE_CONFIG: 3000,
ROSBRIDGE_BATTERY_STATE_THROTTLE: 5000,
ROSBRIDGE_CMD_VEL: "/cmd_vel",
ROSBRIDGE_ODOM: "/odom",
ROSBRIDGE_CAMERA_TOPIC: "/camera/rgb/image_raw/compressed",
ROSBRIDGE_RASPICAM_TOPIC: "/raspicam_node/image_res/compressed",
ROSBRIDGE_IMAGE_CONFIGS: "/image_configs",
ROSBRIDGE_ROSTOPICS_LIST: "/rostopic_list",
ROSBRIDGE_IMAGE_WIDTH: "426",
ROSBRIDGE_IMAGE_HEIGHT: "240",
ROSBRIDGE_FRAME_WIDTH: 426,
ROSBRIDGE_FRAME_HEIGHT: 240,
ROSBRIDGE_BATTERY_TOPIC: "/battery_state",
ROSBRIDGE_MANUAL_TELEOP: false,
ROSBRIDGE_BATTERY_STATUS: true,
}
export default ros_configthis.state = {
...
rosbridgeServerIP: localStorage.getItem('rosbridgeServerIP') || ros_config.ROSBRIDGE_SERVER_IP,
rosbridgeServerPort: localStorage.getItem('rosbridgeServerPort') || ros_config.ROSBRIDGE_SERVER_PORT,
imageWidth: localStorage.getItem('imageWidth') || ros_config.ROSBRIDGE_IMAGE_WIDTH,
imageHeight: localStorage.getItem('imageHeight') || ros_config.ROSBRIDGE_IMAGE_HEIGHT,
frameWidth: localStorage.getItem('frameWidth') || ros_config.ROSBRIDGE_FRAME_WIDTH,
frameHeight: localStorage.getItem('frameHeight') || ros_config.ROSBRIDGE_FRAME_HEIGHT,
batteryStatus: localStorage.getItem('batteryStatus') !== null ? localStorage.getItem('batteryStatus') === "true" : ros_config.ROSBRIDGE_BATTERY_STATUS,
manualTeleop: localStorage.getItem('manualTeleop') !== null ? localStorage.getItem('manualTeleop') === "true" : ros_config.ROSBRIDGE_MANUAL_TELEOP,
...
}localStorage.setItem('rosbridgeServerIP', this.state.rosbridgeServerIP);localStorage.clear();<robot name="ball">
<link name="base_link">
  <visual>
    <origin xyz="0 0 0" rpy="0 0 0"/>
    <geometry>
      <sphere radius=".5"/>
    </geometry>
  </visual>
  <collision>
    <origin xyz="0 0 0" rpy="0 0 0"/>
    <geometry>
      <sphere radius=".5"/>
    </geometry>
  </collision>
  <inertial>
    <origin xyz="0 0 0" rpy="0 0 0"/>
    <mass value="1"/>
    <!-- Fixed malformed exponents: "-4-05" and "-4-06" were missing the
         "e" and would not parse as numbers ("-4e-05" / "-4e-06"). -->
    <inertia ixx="8e-03" ixy="-4e-05" ixz="1e-04"
             iyy="8e-03" iyz="-4e-06"
             izz="8e-03" />
  </inertial>
</link>
<!-- Gazebo-specific contact parameters for the sphere. -->
<gazebo reference="base_link">
  <mu1>1</mu1>
  <mu2>1</mu2>
  <kp>500000</kp>
  <kd>0</kd>
  <minDepth>0</minDepth>
  <maxVel>1000</maxVel>
</gazebo>
</robot><xacro:include filename="$(find turtlebot3_description)/urdf/common_properties.xacro"/>
<xacro:include filename="$(find turtlebot3_description)/urdf/turtlebot3_waffle.gazebo.xacro"/>./utils/xml_2_txtdef open():
self.pub.publish(Bool(True))
def close():
self.pub.publish(Bool(False))
# export ROS_MASTER_URI=http://100.74.60.34:11311
export ROS_MASTER_URI=http://100.66.118.56:11311
# export ROS_IP=100.66.118.56
export ROS_IP=100.74.60.34
# Settings for a physical robot
$(bru mode real)
# $(bru name robb -m 100.99.186.125)
# $(bru name robc -m 100.117.252.97)
$(bru name robc -m $(myvpnip))
# $(bru name robb -m $(myvpnip))rosservice call /gazebo/set_model_state '{model_state: { model_name: MODEL+NAME, pose: { position: { x: 0, y: 0 ,z: 0 }, orientation: {x: 0, y: 0, z: 0, w: 0 } }, twist: { linear: {x: 0.0 , y: 0 ,z: 0 } , angular: { x: 0.0 , y: 0 , z: 0.0 } } , reference_frame: world } }'git clone https://github.com/ultralytics/yolov3
# bash yolov3/data/get_coco_dataset.sh
git clone https://github.com/cocodataset/cocoapi && cd cocoapi/PythonAPI && make && cd ../.. && cp -r cocoapi/PythonAPI/pycocotools yolov3
cd yolov3
python3 test.py --save-json --img-size 416
Namespace(batch_size=32, cfg='cfg/yolov3-spp.cfg', conf_thres=0.001, data_cfg='data/coco.data', img_size=416, iou_thres=0.5, nms_thres=0.5, save_json=True, weights='weights/yolov3-spp.weights')
Using CUDA device0 _CudaDeviceProperties(name='Tesla V100-SXM2-16GB', total_memory=16130MB)
Class Images Targets P R mAP F1
Calculating mAP: 100%|█████████████████████████████████████████| 157/157 [05:59<00:00, 1.71s/it]
all 5e+03 3.58e+04 0.109 0.773 0.57 0.186
Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.335
Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.565
Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.349
Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.151
Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.360
Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.493
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.280
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.432
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.458
Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.255
Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.494
Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.620
python3 test.py --save-json --img-size 608 --batch-size 16
Namespace(batch_size=16, cfg='cfg/yolov3-spp.cfg', conf_thres=0.001, data_cfg='data/coco.data', img_size=608, iou_thres=0.5, nms_thres=0.5, save_json=True, weights='weights/yolov3-spp.weights')
Using CUDA device0 _CudaDeviceProperties(name='Tesla V100-SXM2-16GB', total_memory=16130MB)
Class Images Targets P R mAP F1
Computing mAP: 100%|█████████████████████████████████████████| 313/313 [06:11<00:00, 1.01it/s]
all 5e+03 3.58e+04 0.12 0.81 0.611 0.203
Average Precision (AP) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.366
Average Precision (AP) @[ IoU=0.50 | area= all | maxDets=100 ] = 0.607
Average Precision (AP) @[ IoU=0.75 | area= all | maxDets=100 ] = 0.386
Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.207
Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.391
Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.485
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 1 ] = 0.296
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets= 10 ] = 0.464
Average Recall (AR) @[ IoU=0.50:0.95 | area= all | maxDets=100 ] = 0.494
Average Recall (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.331
Average Recall (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.517
Average Recall (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.618#!/usr/bin/env python3
import rospy
from std_msgs.msg import Bool
class Pincer:
    """Opens and closes the pincer by publishing Bool commands to /servo."""

    def __init__(self):
        # Only the latest command matters, so a queue depth of 1 suffices.
        self.pub = rospy.Publisher('/servo', Bool, queue_size=1)

    def _command(self, is_open):
        # True opens the pincer, False closes it.
        self.pub.publish(Bool(is_open))

    def open(self):
        """Open the pincer."""
        self._command(True)

    def close(self):
        """Close the pincer."""
        self._command(False)
<type>transmission_interface/SimpleTransmission</type>
<joint name="joint">
<hardwareInterface>hardware_interface/VelocityJointInterface</hardwareInterface>
</joint>
<actuator name="motor">
<hardwareInterface>hardware_interface/VelocityJointInterface</hardwareInterface>
<mechanicalReduction>1</mechanicalReduction>
</actuator>
</transmission><visual>tag. Note the uri sexport GAZEBO_MODEL_PATH=~/catkin_ws/src/warehouse_worker/model/hould be correctly set and the name of the material should be identical to the name defined in cargo.material

turtlebot3_slamset_base_frameset_odom_frameset_map_framebase_footprint$(arg ns)/base_footprint$(arg ns)if os.name != 'nt':
settings = termios.tcgetattr(sys.stdin)
key = getKey()
if key == 'z': #h for human
state = 'use-teleop'<gazebo>
<plugin name="p3d_base_controller" filename="libgazebo_ros_p3d.so">
<alwaysOn>true</alwaysOn>
<updateRate>50.0</updateRate>
<bodyName>base_footprint</bodyName>
<topicName>base_footprint_odom</topicName>
<gaussianNoise>0.01</gaussianNoise>
<frameName>world</frameName>
<xyzOffsets>0 0 0</xyzOffsets>
<rpyOffsets>0 0 0</rpyOffsets>
</plugin>
</gazebo>try:
robot_trans = tfBuffer.lookup_transform('robot1', 'robot2', rospy.Time())
except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
continue<your_model_name>
<material>
<material1.png>
<material2.png>
<etc...>
<meshes>
<your_model.dae>
<model.sdf>
<model.config><include file="$(find turtlebot3_bringup)/launch/turtlebot3_robot.launch"/><node pkg="raspicam_node" type="raspicam_node" name="raspicam_node" output="screen">
<param name="camera_info_url" value="package://turtlebot3_bringup/camera_info/turtlebot3_rpicamera.yaml"/>
<param name="width" value="640"/>
<param name="height" value="480"/>
<param name="framerate" value="50"/>
<param name="enable_raw" value="true"/>
<param name="camera_frame_id" value="camera"/>
</node><node pkg="cr_ros_2" type="scan_filter.py" name="scan_filter" output="screen"></node>
<node pkg="cr_ros_2" type="detect_pickup.py" name="pickup_checker" output="screen"></node>
<node pkg="cr_ros_2" type="rover_controller.py" name="rover_controller" output="screen"></node>
<node pkg="cr_ros_2" type="state.py" name="state" output="screen"></node><arg name="map_file" default="$(find cr_ros_2)/files/basement_map.yaml"/><include file="$(find cr_ros_2)/launch/mutant_navigation.launch">
<arg name="map_file" value="$(arg map_file)"/>
<arg name="scan_topic" value="scan_filter"/>
<arg name="open_rviz" value="true"/>
<arg name="move_forward_only" value="false"/>
</include><node pkg="topic_tools" type="throttle" name="cam_throttle" args="messages /$(env ROS_NAMESPACE)/raspicam_node/image/compressed 2" /><node pkg="aruco_detect" name="aruco_detect"
type="aruco_detect" respawn="false">
<param name="image_transport" value="$(arg transport)"/>
<param name="publish_images" value="true" />
<param name="fiducial_len" value="$(arg fiducial_len)"/>
<param name="dictionary" value="$(arg dictionary)"/>
<param name="do_pose_estimation" value="true"/>
<remap from="/camera/compressed"
to="$(arg camera)/$(arg image)/$(arg transport)_throttle"/> <!-- removed throttle -->
<remap from="/camera_info" to="$(arg camera)/camera_info"/>
<remap from="/fiducial_transforms" to="/$(env ROS_NAMESPACE)/fiducial_transforms" />
</node><node pkg="tf" type="static_transform_publisher" name="fid_153" args="19.6 21.8 0.825 0 3.14159 0 /map /fid_153 100" /> <!-- charging dock --># This is how the python file gets the parameter. notice the paramter is namespaced to the name of the node
cam_topic = rospy.get_param("/rover_controller/cam_topic")<!-- This is how the paramter is passed into the node-->
<node pkg="cr_ros_3" type="rover_controller.py" name="rover_controller" output="screen">
<param name="cam_topic" value="/usb_cam_node/image_raw/compressed"/>
</node><arg name="voice" default="true"/>
<group if="$(arg voice)">
<include file="$(find cr_ros_3)/launch/voice.launch"/>
</group><group if="$(eval arg('robot') == 'ALIEN')"><include file="$(dirname)/B.launch"><node pkg="cr_ros_3" type="camera_reconfigure.sh" name="camera_reconfigure" output="screen"/><?xml version="1.0"?>
<model>
<name>cargo_with_marker</name>
<version>1.0</version>
<sdf version="1.6">model.sdf</sdf>
<author>
<name>Tongkai Zhang</name>
</author>
<description>export GAZEBO_MODEL_PATH=~/catkin_ws/src/warehouse_worker/model/
Cargo
</description>
</model> <model name='cargo_with_marker'>
<pose>2.5 0 0.15 0 0 -1.57</pose>
<include>
<uri>model://cargo</uri>
</include>
</model>material Cargo/Diffuse
{
receive_shadows off
technique
{
pass
{
texture_unit
{
texture cargo.jpg
}
}
}
}
material Marker/Diffuse
{
receive_shadows off
technique
{
pass
{
texture_unit
{
texture MarkerData_1.png
}
}
}
}<visual>
<material>
<script>
<uri>model://cargo/materials/scripts</uri>
<uri>model://cargo/materials/textures</uri>
<name>Marker/Diffuse</name>
</script>
</material>
</visual><node pkg="gmapping" type="slam_gmapping" name="turtlebot3_slam_gmapping" output="log">
<param name="base_frame" value="$(arg ns)/base_footprint"/>
<param name="odom_frame" value="$(arg ns)/odom"/>
<param name="map_frame" value="$(arg ns)/map"/>
</node><node name="map_saver" pkg="map_server" type="map_saver"
args="-f $(find swarmbots)/maps/robot$(arg newrobots)" output="screen">
<param name="map" value="/robot$(arg newrobots)/dynamic_map"/>
</node><param name="tf_prefix" value="$(arg ns)" />BURGER_MAX_LIN_VEL = 0.22
BURGER_MAX_ANG_VEL = 2.84
WAFFLE_MAX_LIN_VEL = 0.26
WAFFLE_MAX_ANG_VEL = 1.82
LIN_VEL_STEP_SIZE = 0.01
ANG_VEL_STEP_SIZE = 0.1
msg = """
Control Your TurtleBot3!
---------------------------
Moving around:
w
a s d
x
w/x : increase/decrease linear velocity (Burger : ~ 0.22, Waffle and Waffle Pi : ~ 0.26)
a/d : increase/decrease angular velocity (Burger : ~ 2.84, Waffle and Waffle Pi : ~ 1.82)
space key, s : force stop
CTRL-C to quit
"""
e = """
Communications Failed
"""
def getKey():
    # Read a single keypress, waiting at most 0.1 s.
    # On Windows ('nt') use msvcrt; otherwise put the terminal into raw
    # mode and poll stdin with select() so the call never blocks forever.
    if os.name == 'nt':
        if sys.version_info[0] >= 3:
            # Python 3 getch() returns bytes; decode to str for comparison.
            return msvcrt.getch().decode()
        else:
            return msvcrt.getch()
    tty.setraw(sys.stdin.fileno())
    rlist, _, _ = select.select([sys.stdin], [], [], 0.1)
    if rlist:
        key = sys.stdin.read(1)
    else:
        key = ''  # timeout expired without a keypress
    # Restore the terminal attributes captured at startup.
    # NOTE(review): relies on a module-level `settings` saved via
    # termios.tcgetattr before the first call — confirm it is set.
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key
def vels(target_linear_vel, target_angular_vel):
    """Return a one-line status string with the current target velocities."""
    return f"currently:\tlinear vel {target_linear_vel}\t angular vel {target_angular_vel} "
def makeSimpleProfile(output, input, slop):
    """Step *output* toward *input*, moving at most *slop* per call.

    Produces a simple velocity ramp: repeated calls converge on the
    target without ever jumping by more than *slop* at once.
    """
    gap = input - output
    if gap > slop:
        return output + slop
    if gap < -slop:
        return output - slop
    # Within one step of the target: snap to it exactly.
    return input
def constrain(input, low, high):
    """Clamp *input* into the closed interval [low, high]."""
    return max(low, min(input, high))
def checkLinearLimitVelocity(vel):
    """Clamp a requested linear velocity to the Burger's hardware limit."""
    return constrain(vel, -BURGER_MAX_LIN_VEL, BURGER_MAX_LIN_VEL)
def checkAngularLimitVelocity(vel):
    """Clamp a requested angular velocity to the Burger's hardware limit."""
    return constrain(vel, -BURGER_MAX_ANG_VEL, BURGER_MAX_ANG_VEL)
if state=='use-teleop':
if rospy.Time.now().to_sec()-time_switch.to_sec()>10:
inc_x = posex2 -posex1
inc_y = posey2 -posey1
angle_to_goal = atan2(inc_y, inc_x)
z=math.sqrt((inc_x*inc_x)+(inc_y*inc_y))
if z > .05:
if z < .3:
twist.linear.x=0
twist.angular.z=0
state="cop"
time_switch=rospy.Time.now()
if os.name != 'nt':
settings = termios.tcgetattr(sys.stdin)
#rospy.init_node('turtlebot3_teleop')
cmd_vel_msg = '/cmd_vel'
cmd_vel_pub = rospy.Publisher(cmd_vel_msg, Twist, queue_size=10)
turtlebot3_model = rospy.get_param("model", "burger")
cmd_vel_pub.publish(twist)
inc_x = posex2 -posex1
inc_y = posey2 -posey1
status = 0
target_linear_vel = 0.0
target_angular_vel = 0.0
control_linear_vel = 0.0
control_angular_vel = 0.0
try:
    print(msg)
    # Main teleop loop: read keys, adjust targets, ramp actual commands.
    while(1):
        key = getKey()
        if key == 'w':
            target_linear_vel = checkLinearLimitVelocity(target_linear_vel + LIN_VEL_STEP_SIZE)
            status = status + 1
            print(vels(target_linear_vel, target_angular_vel))
        elif key == 'x':
            target_linear_vel = checkLinearLimitVelocity(target_linear_vel - LIN_VEL_STEP_SIZE)
            status = status + 1
            print(vels(target_linear_vel, target_angular_vel))
        elif key == 'a':
            target_angular_vel = checkAngularLimitVelocity(target_angular_vel + ANG_VEL_STEP_SIZE)
            status = status + 1
            print(vels(target_linear_vel, target_angular_vel))
        elif key == 'd':
            target_angular_vel = checkAngularLimitVelocity(target_angular_vel - ANG_VEL_STEP_SIZE)
            status = status + 1
            print(vels(target_linear_vel, target_angular_vel))
        elif key == ' ' or key == 's':
            # force stop: zero both targets AND the ramped control values
            target_linear_vel = 0.0
            control_linear_vel = 0.0
            target_angular_vel = 0.0
            control_angular_vel = 0.0
            print(vels(target_linear_vel, target_angular_vel))
        elif key == 'r':
            state = 'robber'
            break
        else:
            if (key == '\x03'):  # Ctrl-C
                break
        # Reprint the help banner every 20 keypresses.
        if status == 20:
            print(msg)
            status = 0
        twist = Twist()
        # Ramp the published velocities toward the targets so the robot
        # accelerates smoothly instead of jumping.
        control_linear_vel = makeSimpleProfile(control_linear_vel, target_linear_vel, (LIN_VEL_STEP_SIZE/2.0))
        twist.linear.x = control_linear_vel; twist.linear.y = 0.0; twist.linear.z = 0.0
        control_angular_vel = makeSimpleProfile(control_angular_vel, target_angular_vel, (ANG_VEL_STEP_SIZE/2.0))
        twist.angular.x = 0.0; twist.angular.y = 0.0; twist.angular.z = control_angular_vel
        cmd_vel_pub.publish(twist)
except Exception:
    # Fixed: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit; narrowed so those still propagate (the robot is
    # stopped by the finally block either way).
    print(e)
finally:
    # Always command a full stop and restore the terminal on exit.
    twist = Twist()
    twist.linear.x = 0.0; twist.linear.y = 0.0; twist.linear.z = 0.0
    twist.angular.x = 0.0; twist.angular.y = 0.0; twist.angular.z = 0.0
    cmd_vel_pub.publish(twist)
    if os.name != 'nt':
        termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
git clone https://github.com/leggedrobotics/darknet_roscd ~/catkin_ws
catkin_make<include file="$(find darknet_ros)/launch/darknet_ros.launch">
<arg name="image" value="raspicam_node/image/raw"/>
</include>cd DIRECTORY_TO_DARKNET_ROS/darknet_ros/yolo_network_config/weights
wget http://pjreddie.com/media/files/yolov2-tiny.weightsroslaunch [your_package_name] [your_launch_file.launch]from darknet_ros_msgs.msg import BoundingBoxes, BoundingBox
class ObjectRecognizer:
    """Subscribes to darknet_ros bounding boxes and logs each detection.

    Fixes to the original snippet:
    - missing ``:`` after both ``class ObjectRecognizer`` and
      ``def get_bounding_box_cb(self)`` (SyntaxErrors);
    - ``get_bounding_box_cb`` defined ``cb`` but never returned it, so the
      Subscriber was handed ``None`` instead of a callback.
    """

    def __init__(self):
        self.boxes = []
        self.box_sub = rospy.Subscriber(topics.BOUNDING_BOXES, BoundingBoxes, self.get_bounding_box_cb())

    def get_bounding_box_cb(self):
        """Build and return the subscriber callback (closes over self)."""
        def cb(msg: BoundingBoxes):
            self.boxes = msg.bounding_boxes
            for box in self.boxes:
                print("box_class: {}".format(box.Class))
                print("box_x_min: {}".format(box.xmin))
                print("box_x_max: {}".format(box.xmax))
                print("box_y_min: {}".format(box.ymin))
                print("box_y_max: {}".format(box.ymax))
                print()
        return cb
print()<include file="$(find darknet_ros)/launch/darknet_ros.launch">
<arg name="image" value="raspicam_node/image/compressed"/>
</include>void YoloObjectDetector::cameraCallback(const sensor_msgs::CompressedImageConstPtr& msg) {
ROS_DEBUG("[YoloObjectDetector] USB image received.");rosrun gazebo_ros spawn_model -file `rospack find MYROBOT_description`/urdf/MYROBOT.urdf -urdf -x 0 -y 0 -z 1 -model MYROBOTrosrun xacro xacro `rospack find rrbot_description`/urdf/MYROBOT.xacro >> `rospack find rrbot_description`/urdf/MYROBOT.xml




p = command_dictionary['ls -l']
p.terminate()rosrun gazebo_ros spawn_model -file `rospack find rrbot_description`/urdf/MYROBOT.xml -urdf -y 1 -model rrbot1 -robot_namespace rrbot1$(bru name ROBOTNAME -m 127.0.0.1)<launch>
<arg> lorem ipsum dolor sit amet ... </launch>
<arg> lorem ipsum dolor sit amet ... </launch>
<group ns="$(arg multi_robot_name)">
<stuff> </stuff>
<stuff> </stuff>
<stuff> </stuff>
<stuff> </stuff>
</group>
</launch>import roslibpy
client = roslibpy.Ros(host='localhost', port=9090)
client.run()
pub = roslibpy.Topic(client, topic_name, message_type)
pub.publish(roslibpy.Message(message_data))
pub.unadvertise()
client.terminate()"command": {
"receiver": "/cmd_vel",
"type": "geometry_msgs/Twist",
"msg" : {
"linear": {
"x": 0.0,
"y": 0.0,
"z": 0.0
},
"angular": {
"x": 0.0,
"y": 0.0,
"z": 0.0
}
}
}roslaunch rosbridge_server rosbridge_websocket.launch
rosrun package_name node_name.py spawn_model_client = rospy.ServiceProxy('/gazebo/spawn_sdf_model', SpawnModel)
spawn_model_client(
model_name='TESTCYLINDER',
model_xml=open('/my_ros_data/catkin_ws/src/ros-autonomous-pacman/models/TESTCYLINDER/TESTCYLINDER.urdf', 'r').read(),
initial_pose=Pose(),
reference_frame='world'
)rate = rospy.Rate(20)
while not rospy.is_shutdown():
# your code
rate.sleep()rate = rospy.Rate(20)
while not rospy.is_shutdown():
if robot_state == 'follower':
follow()
elif robot_state == 'leader':
lead()
rate.sleep()def lead():
if send_messages_thread is None:
send_messages_thread = Thread(target = send_messages, daemon = True)
send_messages_thread.start()
else:
complex_function()
def send_messages():
while True:
your_publisher.publish('message')def send_messages():
while not threadA_told_me_to_stop:
your_publisher.publish('message')dynamodb = boto3.resource('dynamodb')delete_item()
delete_table()
put_item()
query()
update_item()
update_table()class Credentials:
AWS_ACCESS_KEY_ID = ''
SECRET_ACCESS_KEY = ''
OWNER_ID = ''





import rospy
from std_srvs.srv import Empty
rospy.wait_for_service('/gazebo/reset_world')
reset_world = rospy.ServiceProxy('/gazebo/reset_world', Empty)
reset_world()<launch>
<arg name="robot_name"/>
<arg name="init_pose"/>
<arg name="team"/>
<arg name="type"/>
<group if="$(eval team == 'Red')">
<group if="$(eval type == 'painter')">
<param name="robot_description"
command="$(find xacro)/xacro.py $(find robopaint)/urdf/red/painterbot_red_burger.urdf.xacro" />
</group>
<group if="$(eval type == 'attacker')">
<param name="robot_description"
command="$(find xacro)/xacro.py $(find robopaint)/urdf/red/attackerbot_red_burger.urdf.xacro" />
</group>
</group>
<group if="$(eval team == 'Blue')">
<group if="$(eval type == 'painter')">
<param name="robot_description"
command="$(find xacro)/xacro.py $(find robopaint)/urdf/blue/painterbot_blue_burger.urdf.xacro" />
</group>
<group if="$(eval type == 'attacker')">
<param name="robot_description"
command="$(find xacro)/xacro.py $(find robopaint)/urdf/blue/attackerbot_blue_burger.urdf.xacro" />
</group>
</group>
<!-- Fixed: the opening "<" of the node tag was missing, making the XML
     unparseable. Spawns the robot at init_pose from robot_description. -->
<node name="spawn_minibot_model" pkg="gazebo_ros" type="spawn_model"
      args="$(arg init_pose) -urdf -param robot_description -model $(arg robot_name)"
      respawn="false" output="screen" />
<node pkg="robot_state_publisher" type="robot_state_publisher"
name="robot_state_publisher" output="screen"/>
</launch>

































