pax_global_header00006660000000000000000000000064134474464420014526gustar00rootroot0000000000000052 comment=e464cf8ccf4458bcc64fb763f906ee54eb0ce984 libais-master/000077500000000000000000000000001344744644200136265ustar00rootroot00000000000000libais-master/.codeclimate.yml000066400000000000000000000001521344744644200166760ustar00rootroot00000000000000languages: Ruby: false JavaScript: false PHP: false Python: true exclude_paths: - "third_party/**"libais-master/.gitignore000066400000000000000000000002661344744644200156220ustar00rootroot00000000000000*.o *.so *.a [0-9]* build *.dSYM *.pyc nohup.out foo* test_libais *~ *.egg-info *core* _ais.so dist .idea/ venv/ test/data/*.gpsdecode.json .eggs venv2 .cache .coverage .DS_Store ve libais-master/.travis.yml000066400000000000000000000011331344744644200157350ustar00rootroot00000000000000sudo: false dist: xenial language: python env: global: - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels cache: directories: - ~/.cache/pip addons: apt: sources: - ubuntu-toolchain-r-test packages: - gcc-6 - g++-6 python: - 3.7 before_install: - pip install pip setuptools --upgrade - pip install pip pytest --upgrade install: - CC=g++-6 pip install .\[tests\] --upgrade script: - (cd src && CC=gcc-6 CXX=g++-6 make -f Makefile-custom -j 4 test) - py.test ais test --cov=ais --cov-report term-missing libais-master/AUTHOR.txt000066400000000000000000000001031344744644200153630ustar00rootroot00000000000000Kurt Schwehr Kurt Schwehr libais-master/CMakeLists.txt000066400000000000000000000012561344744644200163720ustar00rootroot00000000000000# -*- shell-script -*- make up a mode for emacs # To use cmake: # # cmake . # make cmake_minimum_required (VERSION 2.8) project (libais) include(CheckCXXCompilerFlag) CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11) CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X) if(COMPILER_SUPPORTS_CXX11) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") elseif(COMPILER_SUPPORTS_CXX0X) set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") else() message(STATUS "The compiler ${CMAKE_CXX_COMPILER} has no C++11 support. 
Please use a different C++ compiler.") endif() # include_directories("${PROJECT_BINARY_DIR}/src/libais") add_subdirectory(src) libais-master/Changelog.md000066400000000000000000000166651344744644200160550ustar00rootroot00000000000000Changelog and release history for libais ======================================== 0.17 - 2018-01-17 ----------------- * Fixes to 10, 12, 16, 26 * Fixes to 6:1:0, 6:1:3, 6:1:4, 6:1:5, 8:1:0, 8:367:22, 8:200:24 * Add 8:200:21, 8:200:22 * Cleanup nmea_messages.py * Use a logger in python * Factor out ais_bitset.cpp * Stop calling ParseNmeaPayload mulitple times per message * Stay far away from -ffast-math * Fixes for CMake builds 0.16 - 2015-11-12 ----------------- * Make setup.py more functional for development and testing * Version number is now in ais/__init__.py * Travis-CI container testing * Converted README.org to README.rst * Converted Changelog.html to Changelog.md * Add ais/io.py * Add ais/nmea_messages.py for non-AIS messages * Some improvements to ais/stream and aisdecode, but they are now deprecated * TAG Block and USCG parsing converts message parts to python types * Moved python code out of the top level directory * Remove legacy SQL, Nagios and NAIS tools * Fix initialization of ais12, ais16, ais17, ais18, ais8_1_22, ais8_366_22 * Fix memory leak in python handling of ais20 * Add C++ decode_body and vdm handler 0.15 - 2015-06-16 ----------------- * Added libais namespace * Started using clang-format * Fix Ais18 carrier sense (CS) messages * Use AisBitset for bit decoding * Fix AIS_STATUS_STRINGS * Use std::array for fixed sized arrays * Update DAC enum with new values * Added ais/compatibility/gpsd.py * Added ais/stream (deprecated - please do not use) * Closed github issues 27, 31, 34, 36, 38, 42, 45, 46, 47, 51, 52, 53, 56, and 60 * Added ais/nmea.py, ais/tag_block.py, and ais/util_test.py * Rearranged the tree structure to have src/... for c++ and {ais,test}/ for python * Use more c++11: NULL -> nullptr * Add 8:367:22 Area Notice * python setup.py test mostly works * At least one test for all top levels except msg 6. * Add Travis CI testing 0.14 - 2014-04-22 ----------------- * Switch license from LGPL 3+ to Apache 2.0 * Msg 8:366:22 bit count check fixed * Fixed spare bit calculation in msg 8 * Added DAC country codes and FI message ids enums * Fixed error tracking in msg 6 and 8 sub-messages 0.13 - 2012-11-18 ----------------- * Switch to the [Google C++ style guide](http://code.google.com/p/google-styleguide/) and [`cpplint.py`](http://google-styleguide.googlecode.com/svn/trunk/cpplint/) * Lots of small bugs found in the code review process * Message constructors now start as status = AIS_UNINITIALIZED and set to AIS_OK when decoding is done * Switched to using initializers to call parent initializers in C++ * Removed a lot of duplicate and unused code. AIS_ERR_WRONG_MSG_TYPE removed. More still needs to be removed. * Rewrote the C++ NMEA parsing functions: GetBody, GetNthField, GetPad and Split. * Switch to using asserts for checks that imply coding errors within the library. 0.12 - 2012-11-05 ----------------- * Fix bit count bugs in 8_1_14, 8_1_15, 8_1_27 * Rewrote nth_field. 
Added split and delimiters * Folded in 366 header to ais.h * Lots and lots of style cleanup * Use std::foo, remove std:: from code * Documentation for Msg 17 * Testing of Msg 20 0.11 - 2012-10-29 ----------------- * New release because a large binary went out in 0.10 0.10 - 2012-10-29 ----------------- * Add a test directory and test of all top level msgs except 20 in python * Begin cleanup of test_libais.cpp * Almost all decoders require pad bits now * linted - lots of formatting changes * Start implmenting Msg 17 GNSS differential corrections * Convert FIX to TODO and put (schwehr) after each to assign them to myself. * remove bool casting of bitset[offset] * Implemented 8 1 26 * Clean up c++ logical oprators. Do not use and, or, and not * Message 24 needed pad. Fixed * Removed print() * remove almost all cout/cerr that were not in print() * Remove lots of dead code * Pass pad into ais.decode in python, but handle without 0.9 - 2012-10-19 ----------------- * ais.decode now requires the pad bits in python * Added RIS 8_200_{10,23,24,40,55} * Implmented the rest of Circ 236 BBM * Implmented all Circ 289 messages except ABM route and BBM env sensors * Implemented AIS messages 6, 9, 10, 12, 15-17, 20-22, 23, 25-27. Still payload work to do. * Imported rolker's CMakeList.txt 0.8 - 2012-05-12 ----------------- * MANIFEST.in now has VERSION * Implemented AIS messages 7, 13, 14, 18, 19, 24 0.7 - 2012-04-30 ----------------- * Added MANIFEST.in * setup.py compliant with pypi * Added AIS area/zone messages 8:1:22 and 8:366:22 0.6 - 2010-06-17 ----------------- * ais21.cpp: new file - AtoN status * ais.h: fix CHECKPOINT for emacs 23 * ais.h: Proper inheritance from AisMsg of message_id, repead_id and mmsi * ais.h: started trying to define 8_366_34 - zone msg * nagios_pg_ais.py: new file - monitor db statios with nagios ssh or snmp 0.5 - 2010-05-25 ----------------- * Still a lot of untested/unimplemented messages * docs: Include ESR's AIVDM.txt with permission * docs: MID / DAC / MMSI prefixes now listed in mid.csv * docs: dac/fi list * docs: More notes for message designers * Added msg 8 - 1:11 - IMO Met/Hydro * Added AIS msg 9 - SAR Position * nais2py.py: Added try except wrapper on ProcessingThread. Also try to track one off error found on call with timestamp converting to float * send_data.py: new file for testing * nais2py.py: LineQueue now has a custom drop handler for too many lines waiting. Could be better. * nais2py.py: Added threaded network interface. Seeing the network side overwhelm the processing thread * nais2py.py: Added response_class handling to VesselNames. Can be preloaded. Allows periodic name dump * nais2py.py: Added ENABLE_DB flag to try runs without database execute commands. Faster debugging * vessels.csv: new file - example preloading of vessel names as response ships 0.4 - 2010-05-11 ----------------- * nais2py.py: Started providing a command line interface * nais2py.py: Added PositionCache class * nais2py.py: Added distance calculation code. * nais2py.py: Changed the database table names and structure. Now vessel_name and vessel_pos * ais_lut.py: new file with lookup tables to make ais wire codes human readable. 
0.3 - 2010-05-10 ----------------- * ais.c: added check_error_messages to make sure they are not out of sync * -D_GLIBCXX_DEBUG appears broken in GCC 4.[0-2] so do not use * ais_decode_normed.cpp: temporary C++ side decoding of normed AIVDM messages * nais2pg: added vesselname class to manage updates to postgresql * Added message 24 * Fixed python reference counting. Added XXSafeSetItem functions 0.2 - 2010-05-06 ----------------- * Added C++ error handling to classes via AIS_STATUS * C++ message now inherit from AisMsg and need to call init() in constructor * Added C++ messages 7_13, 14, 18, and 19 * aivdm_to_bits now has error checking * ais123.cpp renames to ais1_2_3.cpp * Switched to unicode in ais_py.cpp to support Python 3 * ais_py.cpp has strange INIT to handle Python 2 and 3 * nais2pg.py is starting to implement a new AIS feed to database daemon * Redid my old USCG regex to have better names with lower_lower style * LineQueue should now support reading through a socket, which I got wrong before * Total redo of the Normalization queue to be much lower overhead and to call the regex only once per line received. Only keep they body of all but the last message in a sequence. * test_libais.cpp is not really a test framework, but it does try out the pure C++ world. 0.1 - 2010-05-03 ----------------- * Able to decode messages 1-5 from python * Still a lot of work left to do! libais-master/LICENSE000066400000000000000000000010521344744644200146310ustar00rootroot00000000000000Copyright 2010 Kurt Schwehr. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
libais-master/MANIFEST.in000066400000000000000000000005711344744644200153670ustar00rootroot00000000000000include AUTHOR.txt include Changelog.md include CMakeLists.txt include LICENSE include Makefile-custom include Makefile.am include MANIFEST.in include README.rst include setup.py recursive-include bin * recursive-include docs *.txt *.org Makefile *.csv recursive-include src *.cpp *.cc *.h Makefile-custom recursive-include test *.py Makefile-custom recursive-include utils * libais-master/Makefile-custom000066400000000000000000000060651344744644200166050ustar00rootroot00000000000000# -*- makefile -*- VERSION:=${shell grep __version__ ais/__init__.py | cut -d\' -f 2} default: @echo @echo " Welcome to libais ${VERSION}" @echo @echo "Build options:" @echo @echo " all - build everything" @echo " clean - remove all objects and executables" @echo " tar - create a release source tar using VERSION" @echo @echo " test - C++, python2, and python3 unittests" @echo " python2 - build the python2 module" @echo " python3 - build the python3 module" @echo @echo "Read the README for more information" all: python2 python3 lib lib: (cd src/libais; make -f Makefile-custom all) DIST:=libais-${VERSION} TAR:=${DIST}.tar tar: rm -f ${TAR}.bz2 ${TAR} rm -rf ${DIST} mkdir ${DIST} cp -rp src test [A-Z]* *.py ${DIST}/ tar cf ${TAR} ${DIST} bzip2 -9 ${TAR} rm -rf ${DIST} python2: CFLAGS="-O3 -funroll-loops -fexpensive-optimizations" python2 setup.py build python3: CFLAGS="-std=c++11 -O0 -g -UNDEBUG" python3 setup.py build clean: (cd ais && make -f Makefile-custom clean) (cd src && make -f Makefile-custom clean) (cd test && make -f Makefile-custom clean) rm -rf build rm -f _ais.so rm -rf */__pycache__ rm -rf *.egg-info real-clean: clean rm -f MANIFEST rm -rf build dist find . -name .DS_Store | xargs rm -f rm -f test/typeexamples.gpsdecode.json find . -name \*.o | xargs rm -f find . -name \*.a | xargs rm -f find . -name \*.la | xargs rm -f find . -name \*.so | xargs rm -f echo "cmake files:" rm -rf CMakeCache.txt rm -f Makefile find src -name Makefile | xargs rm -f find . -name cmake_install.cmake | xargs rm -rf find . -name CMakeFiles | xargs rm -rf # This is more aggressive than the GNU idea of mainter-clean. maintainer-clean: real-clean rm -f aclocal.m4 rm -rf autom4te.cache rm -f config.{guess,log,status,sub} rm -f configure rm -f depcomp rm -rf inst rm -f install-sh rm -f libtool rm -f ltmain.sh rm -f m4/{libtool,ltoptions,ltsugar,ltversion,lt~obsolete}.m4 rm -f missing find . -name .deps | xargs rm -rf find . 
-name .libs | xargs rm -rf .PHONY: test test: (cd src; $(MAKE) -f Makefile-custom test) # Hard coded depends src/libais/ais.o: src/libais/ais.h src/libais/ais123.o: src/libais/ais.h src/libais/ais4_11.o: src/libais/ais.h src/libais/ais5.o: src/libais/ais.h src/libais/ais6.o: src/libais/ais.h src/libais/ais7_13.o: src/libais/ais.h src/libais/ais8_1_22.o: src/libais/ais.h src/libais/ais8_1_26.o: src/libais/ais.h src/libais/ais9.o: src/libais/ais.h src/libais/ais10.o: src/libais/ais.h src/libais/ais12.o: src/libais/ais.h src/libais/ais14.o: src/libais/ais.h src/libais/ais15.o: src/libais/ais.h src/libais/ais16.o: src/libais/ais.h src/libais/ais17.o: src/libais/ais.h src/libais/ais18.o: src/libais/ais.h src/libais/ais19.o: src/libais/ais.h src/libais/ais20.o: src/libais/ais.h src/libais/ais21.o: src/libais/ais.h src/libais/ais22.o: src/libais/ais.h src/libais/ais23.o: src/libais/ais.h src/libais/ais24.o: src/libais/ais.h src/libais/ais25.o: src/libais/ais.h src/libais/ais26.o: src/libais/ais.h src/libais/ais27.o: src/libais/ais.h src/libais/ais_py.o: src/libais/ais.h libais-master/Makefile.am000066400000000000000000000000471344744644200156630ustar00rootroot00000000000000ACLOCAL_AMFLAGS = -I m4 SUBDIRS = src libais-master/README.rst000066400000000000000000000072401344744644200153200ustar00rootroot00000000000000============ Introduction ============ .. image:: https://travis-ci.org/schwehr/libais.svg?branch=master :target: https://travis-ci.org/schwehr/libais .. image:: https://scan.coverity.com/projects/5519/badge.svg :target: https://scan.coverity.com/projects/5519 .. image:: https://codeclimate.com/github/schwehr/libais/badges/gpa.svg :target: https://codeclimate.com/github/schwehr/libais .. image:: https://badge.fury.io/py/libais.svg :target: http://badge.fury.io/py/libais Library for decoding maritime Automatic Identification System messages. See Also ======== `Automatic Identification System `_ Other open source AIS projects: - `GPSd `_ - `AisLib `_ - `noaadata `_ - `ais-areanotice `_ - `OpenCPN `_ - `aisparser `_ - `nmea_plus `_ Building ======== Building with Python -------------------- .. code-block:: console $ python setup.py build $ python setup.py install Testing with Python -------------------- .. code-block:: console $ virtualenv ve $ source ve/bin/activate $ python setup.py test Building with CMake ------------------- .. code-block:: console $ cmake . $ make Building with legacy Makefile ----------------------------- .. code-block:: console $ make -f Makefile-custom test Usage ===== There are two interfaces to libais, one high-level iterator based one and a low-level fast C++ only one. The iterator based interface is accessed the following way: .. code-block:: python import ais.stream with open("file.nmea") as f: for msg in ais.stream.decode(f): print msg To use the low-level C++ interface directly, you need to handle multi-line messages and padding yourself: .. code-block:: python import ais ais.decode('15PIIv7P00D5i9HNn2Q3G?wB0t0I', 0) ais.decode('402u=TiuaA000r5UJ`H4`?7000S:', 0) ais.decode('55NBjP01mtGIL@CW;SM`_ - IMO Circ 236 - IMO Circ 289 - EU RIS Developing ---------- The C++ code was switched to the Google style in November, 2012. Indenting should be by 2 spaces. http://google-styleguide.googlecode.com/svn/trunk/cpplint/ .. code-block:: console $ git clone https://github.com/schwehr/libais $ cd libais $ virtualenv ve $ source ve/bin/activate $ pip install -e .[tests] $ python setup.py test $ py.test --cov=ais --cov-report term-missing or .. 
code-block:: console $ git clone https://github.com/schwehr/libais $ cd libais $ virtualenv ve $ source ve/bin/activate $ pip install -e .[test] $ python setup.py develop $ python setup.py test $ py.test --cov=ais --cov-report term-missing libais-master/ais/000077500000000000000000000000001344744644200144025ustar00rootroot00000000000000libais-master/ais/Makefile-custom000066400000000000000000000001161344744644200173500ustar00rootroot00000000000000# -*- makefile -*- all: @echo "Nothing to do." clean: -rm -f *.pyc */*.pyc libais-master/ais/__init__.py000066400000000000000000000003701344744644200165130ustar00rootroot00000000000000"""Bring the C++ extension into the ais namespace.""" import logging logging.basicConfig() from _ais import decode from _ais import DecodeError from ais.io import open from ais.io import NmeaFile __license__ = 'Apache 2.0' __version__ = '0.17' libais-master/ais/compatibility/000077500000000000000000000000001344744644200172535ustar00rootroot00000000000000libais-master/ais/compatibility/__init__.py000066400000000000000000000002111344744644200213560ustar00rootroot00000000000000import warnings warnings.warn( "The compatability module is deprecated and will be removed in 1.0", FutureWarning, stacklevel=2 ) libais-master/ais/compatibility/gpsd.py000066400000000000000000000446641344744644200206000ustar00rootroot00000000000000"""Convert libais message dictionaries to GPSD JSON.""" import datetime class Mangler(object): """Convert libais dictionaries to gpsd dictionaries.""" def __init__(self, copy_tagblock_timestamp=True): self.copy_tagblock_timestamp = copy_tagblock_timestamp def __call__(self, msg): res = {} self.mangle(res, msg) method = 'mangle__%s' % (msg['id'],) if hasattr(self, method): getattr(self, method)(res, msg) for key in msg: method1 = 'mangle__%s__%s' % (key, msg['id']) method2 = 'mangle__%s' % (key,) if hasattr(self, method1): getattr(self, method1)(res, msg) elif hasattr(self, method2): getattr(self, method2)(res, msg) else: res[key] = msg[key] return res def mangle(self, res, msg): res['class'] = 'AIS' res['device'] = 'stdin' res['scaled'] = True if msg['id'] in (1, 2, 3): res['status'] = '15' res['status_text'] = self.nav_statuses[15] res['heading'] = 511 def mangle__id(self, res, msg): res['type'] = msg['id'] # Types 1, 2 and 3: Position Report Class A. def mangle__cog(self, res, msg): res['course'] = msg['cog'] def mangle__nav_status(self, res, msg): res['status'] = msg['nav_status'] res['status_text'] = self.nav_statuses[msg['nav_status']] def mangle__position_accuracy(self, res, msg): res['accuracy'] = msg['position_accuracy'] == 1 def mangle__repeat_indicator(self, res, msg): res['repeat'] = msg['repeat_indicator'] def mangle__rot_over_range(self, res, msg): pass def mangle__rot(self, res, msg): if msg['rot_over_range']: res['turn'] = None else: res['turn'] = msg['rot'] def mangle__sog(self, res, msg): res['speed'] = msg['sog'] def mangle__special_manoeuvre(self, res, msg): res['maneuver'] = msg['special_manoeuvre'] # spare, special_manoeuvre, sync_state, timestamp. def mangle__true_heading(self, res, msg): res['heading'] = msg['true_heading'] # utc_hour, utc_min, utc_spare. 
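  # Note on dispatch: __call__ above first looks for a per-message hook named
  # mangle__<id>, then for each field tries mangle__<field>__<id> before
  # falling back to mangle__<field>; fields with no handler (e.g. utc_hour,
  # utc_min, utc_spare noted above) are copied through unchanged. For example,
  # the 'name' field of a type 21 message is handled by mangle__name__21
  # further down, while a type 5 message uses mangle__name.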
def mangle__x(self, res, msg): res['lon'] = msg['x'] def mangle__y(self, res, msg): res['lat'] = msg['y'] # Type 4: Base Station Report def mangle__year(self, res, msg): res['timestamp'] = '%04d-%02d-%02dT%02d:%02d:%02dZ' % (msg['year'], msg['month'], msg['day'], msg['hour'], msg['minute'], msg['second']) def mangle__month(self, res, msg): pass def mangle__day(self, res, msg): pass def mangle__hour(self, res, msg): pass def mangle__minute(self, res, msg): pass def mangle__second(self, res, msg): pass def mangle__fix_type(self, res, msg): res['epfd'] = msg['fix_type'] res['epfd_text'] = self.fix_types.get(msg['fix_type'], self.fix_types[0]) # Type 5: Static and Voyage Related Data ##### def mangle__name(self, res, msg): res['shipname'] = msg['name'].strip('@').strip() def mangle__destination(self, res, msg): res['destination'] = msg['destination'].strip('@').strip() def mangle__callsign(self, res, msg): res['callsign'] = msg['callsign'].strip('@').strip() def mangle__dim_a(self, res, msg): res['to_bow'] = msg['dim_a'] def mangle__dim_b(self, res, msg): res['to_stern'] = msg['dim_b'] def mangle__dim_c(self, res, msg): res['to_port'] = msg['dim_c'] def mangle__dim_d(self, res, msg): res['to_starboard'] = msg['dim_d'] def mangle__eta_day(self, res, msg): if msg['eta_month'] < 1 or msg['eta_day'] == 0 or msg['eta_hour'] == 24: return if msg['eta_minute'] == 60: return year = 0 if 'year' in msg: year = msg['year'] elif 'tagblock_timestamp' in msg: year = datetime.datetime.utcfromtimestamp(msg['tagblock_timestamp']).year try: eta = datetime.datetime(year, msg['eta_month'], msg['eta_day'], msg['eta_hour'], msg['eta_minute']) except: # TODO(redhog): What exception is being triggered and why? pass else: res['eta'] = eta.strftime('%Y-%m-%dT%H:%H:%S.%fZ') def mangle__eta_hour(self, res, msg): pass def mangle__eta_minute(self, res, msg): pass def mangle__eta_month(self, res, msg): pass def mangle__imo_num(self, res, msg): res['imo'] = str(msg['imo_num']) def mangle__type_and_cargo(self, res, msg): res['shiptype'] = msg['type_and_cargo'] try: res['shiptype_text'] = self.ship_types[msg['type_and_cargo']] except KeyError: res['shiptype_text'] = '%d - Unknown' % msg['type_and_cargo'] # Type 6: Binary Addressed Message def mangle__mmsi_dest(self, res, msg): res['dest_mmsi'] = msg['mmsi_dest'] def mangle__seq(self, res, msg): res['seqno'] = msg['seq'] # Note: retransmit has different values for the same message from gpsd... bug? 
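  # A minimal usage sketch (commented out; reuses the single-line type 1
  # payload shown in the README — any complete AIS VDM payload plus its
  # pad-bit count would do):
  #
  #   import ais
  #   from ais.compatibility import gpsd  # importing emits a FutureWarning (module is deprecated)
  #
  #   msg = ais.decode('15PIIv7P00D5i9HNn2Q3G?wB0t0I', 0)  # type 1 position report
  #   report = gpsd.mangle(msg)  # module-level Mangler() instance defined at the bottom of this file
  #   # report now carries gpsd-style keys such as 'type', 'lon', 'lat', 'speed' and 'course'.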
# Type 7: Binary Acknowledge def mangle__7(self, res, unused_msg): """Values will be set in mangle__acks.""" res.update({ 'mmsi1': 0, 'mmsi2': 0, 'mmsi3': 0, 'mmsi4': 0}) def mangle__acks(self, res, msg): for idx, (mmsi, unused_seq_num) in enumerate(msg['acks']): res['mmsi%s' % (idx+1,)] = mmsi # Type 8: Binary Broadcast Message def mangle__fi(self, res, msg): res['fid'] = msg['fi'] # Note: Data is missing from libais message # Type 9: Standard SAR Aircraft Position Report def mangle__timestamp(self, res, msg): res['second'] = msg['timestamp'] # Type 12: Addressed Safety-Related Message def mangle__seq_num(self, res, msg): res['seqno'] = msg['seq_num'] def mangle__retransmitted(self, res, msg): res['retransmit'] = msg['retransmitted'] # Type 13: Safety-Related Acknowledgement def mangle__13(self, res, unused_msg): res.update({ 'mmsi1': 0, 'mmsi2': 0, 'mmsi3': 0, 'mmsi4': 0}) # Type 15: Interrogation def mangle__15(self, res, unused_msg): res.update({ 'mmsi1': 0, 'offset1_1': 0, 'type1_1': 0, 'offset1_2': 0, 'type1_2': 0, 'mmsi2': 0, 'offset2_1': 0, 'type2_1': 0, 'offset2_2': 0, 'type2_2': 0}) def mangle__mmsi_1(self, res, msg): res['mmsi1'] = msg['mmsi_1'] def mangle__mmsi_2(self, res, msg): res['mmsi2'] = msg['mmsi_2'] def mangle__slot_offset_1_1(self, res, msg): res['offset1_1'] = msg['slot_offset_1_1'] def mangle__slot_offset_1_2(self, res, msg): res['offset1_2'] = msg['slot_offset_1_2'] def mangle__msg_1_1(self, res, msg): res['type1_1'] = msg['msg_1_1'] def mangle__dest_msg_1_2(self, res, msg): res['type1_2'] = msg['dest_msg_1_2'] def mangle__msg_2_1(self, res, msg): res['type2_1'] = msg['msg_2_1'] def mangle__dest_msg_2_2(self, res, msg): res['type2_2'] = msg['dest_msg_2_2'] # Type 16: Assignment Mode Command def mangle__16(self, res, unused_msg): res.update({ 'increment1': 0, 'offset1': 0, 'mmsi1': 0, 'increment2': 0, 'offset2': 0, 'mmsi2': 0}) def mangle__inc_a(self, res, msg): res['increment1'] = msg['inc_a'] def mangle__dest_mmsi_a(self, res, msg): res['mmsi1'] = msg['dest_mmsi_a'] def mangle__offset_a(self, res, msg): res['offset1'] = msg['offset_a'] def mangle__inc_b(self, res, msg): res['increment2'] = msg['inc_b'] def mangle__dest_mmsi_b(self, res, msg): res['mmsi2'] = msg['dest_mmsi_b'] def mangle__offset_b(self, res, msg): res['offset2'] = msg['offset_b'] # Type 17: DGNSS Broadcast Binary Message # Note: Data is missing from libais message # Type 18: Standard Class B CS Position Report ##### def mangle__band_flag(self, res, msg): res['band'] = msg['band_flag'] == 1 def mangle__commstate_flag(self, res, msg): res['cs'] = msg['commstate_flag'] == 1 def mangle__display_flag(self, res, msg): res['display'] = msg['display_flag'] == 1 def mangle__dsc_flag(self, res, msg): res['dsc'] = msg['dsc_flag'] == 1 def mangle__m22_flag(self, res, msg): res['msg22'] = msg['m22_flag'] == 1 def mangle__mode_flag(self, res, msg): res['mode'] = msg['mode_flag'] == 1 def mangle__unit_flag(self, res, msg): res['unit'] = msg['unit_flag'] == 1 # Type 19: Extended Class B CS Position Report def mangle__assigned_mode(self, res, msg): res['assigned'] = msg['assigned_mode'] == 1 # Type 20 Data Link Management Message def mangle__reservations(self, res, msg): for idx, reservation in enumerate(msg['reservations']): i = str(idx + 1) res['increment' + i] = reservation['incr'] res['number' + i] = reservation['num_slots'] res['offset' + i] = reservation['offset'] res['timeout' + i] = reservation['timeout'] # Type 21: Aid-to-Navigation Report def mangle__aton_type(self, res, msg): res['aid_type'] = 
msg['aton_type'] res['aid_type_text'] = self.aton_types[msg['aton_type']] def mangle__aton_status(self, res, msg): res['regional'] = msg['aton_status'] def mangle__name__21(self, res, msg): res['name'] = msg['name'].strip('@').strip() def mangle__off_pos(self, res, msg): res['off_position'] = msg['off_pos'] def mangle__virtual_aton(self, res, msg): res['virtual_aid'] = msg['virtual_aton'] # Type 22: Channel Management def mangle__chan_a(self, res, msg): res['channel_a'] = msg['chan_a'] def mangle__chan_b(self, res, msg): res['channel_b'] = msg['chan_b'] def mangle__chan_a_bandwidth(self, res, msg): res['band_a'] = msg['chan_a_bandwidth'] def mangle__chan_b_bandwidth(self, res, msg): res['band_b'] = msg['chan_b_bandwidth'] def mangle__power_low(self, res, msg): res['power'] = msg['power_low'] def mangle__txrx_mode(self, res, msg): res['txrx'] = msg['txrx_mode'] def mangle__x1(self, res, msg): res['ne_lon'] = msg['x1'] def mangle__x2(self, res, msg): res['sw_lon'] = msg['x2'] def mangle__y2(self, res, msg): res['sw_lat'] = msg['y2'] def mangle__y1(self, res, msg): res['ne_lat'] = msg['y1'] def mangle__zone_size(self, res, msg): res['zonesize'] = msg['zone_size'] # Type 23: Group Assignment Command def mangle__station_type(self, res, msg): res['stationtype'] = msg['station_type'] res['stationtype_text'] = self.station_types[msg['station_type']] def mangle__interval_raw(self, res, msg): res['interval'] = msg['interval_raw'] # Tagblock data def mangle__tagblock_timestamp(self, res, msg): res['tagblock_timestamp'] = datetime.datetime.utcfromtimestamp( msg['tagblock_timestamp']).strftime('%Y-%m-%dT%H:%M:%S.%fZ') if self.copy_tagblock_timestamp and 'year' not in msg: res['timestamp'] = res['tagblock_timestamp'] # Mappings station_types = { 0: 'All types of mobiles', 1: 'Reserved for future use', 2: 'All types of Class B mobile stations', 3: 'SAR airborne mobile station', 4: 'Aid to Navigation station', 5: 'Class B shipborne mobile station (IEC62287 only)', 6: 'Regional use and inland waterways', 7: 'Regional use and inland waterways', 8: 'Regional use and inland waterways', 9: 'Regional use and inland waterways', 10: 'Reserved for future use', 11: 'Reserved for future use', 12: 'Reserved for future use', 13: 'Reserved for future use', 14: 'Reserved for future use', 15: 'Reserved for future use'} aton_types = { 0: 'Default, Type of Aid to Navigation not specified', 1: 'Reference point', 2: 'RACON (radar transponder marking a navigation hazard)', 3: 'Fixed structure off shore, such as oil platforms, wind farms, rigs.', 4: 'Spare, Reserved for future use.', 5: 'Light, without sectors', 6: 'Light, with sectors', 7: 'Leading Light Front', 8: 'Leading Light Rear', 9: 'Beacon, Cardinal N', 10: 'Beacon, Cardinal E', 11: 'Beacon, Cardinal S', 12: 'Beacon, Cardinal W', 13: 'Beacon, Port hand', 14: 'Beacon, Starboard hand', 15: 'Beacon, Preferred Channel port hand', 16: 'Beacon, Preferred Channel starboard hand', 17: 'Beacon, Isolated danger', 18: 'Beacon, Safe water', 19: 'Beacon, Special mark', 20: 'Cardinal Mark N', 21: 'Cardinal Mark E', 22: 'Cardinal Mark S', 23: 'Cardinal Mark W', 24: 'Port hand Mark', 25: 'Starboard hand Mark', 26: 'Preferred Channel Port hand', 27: 'Preferred Channel Starboard hand', 28: 'Isolated danger', 29: 'Safe Water', 30: 'Special Mark', 31: 'Light Vessel / LANBY / Rigs'} fix_types = { 0: 'Undefined', 1: 'GPS', 2: 'GLONASS', 3: 'Combined GPS/GLONASS', 4: 'Loran-C', 5: 'Chayka', 6: 'Integrated navigation system', 7: 'Surveyed', 8: 'Galileo'} # Match the output of gpsd 3.11. 
nav_statuses = { 0: 'Under way using engine', 1: 'At anchor', 2: 'Not under command', 3: 'Restricted manoeuverability', # Maneuverability. 4: 'Constrained by her draught', 5: 'Moored', 6: 'Aground', 7: 'Engaged in fishing', 8: 'Under way sailing', # Reserved for future amendment of navigational status for ships # carrying DG, HS, or MP, or IMO hazard or pollutant category C, # high speed craft (HSC). 9: 'Reserved for HSC', # Reserved for future amendment of navigational status for ships # carrying dangerous goods (DG), harmful substances (HS) or marine # pollutants (MP), or IMO hazard or pollutant category A, wing in # ground (WIG). 10: 'Reserved for WIG', # Power-driven vessel towing astern (regional use). 11: 'Reserved', # Power-driven vessel pushing ahead or towing alongside (regional use). 12: 'Reserved', # Reserved for future use. 13: 'Reserved', # AIS-SART (active), MOB-AIS, EPIRB-AIS, 14: 'Reserved', # Default (also used by AIS-SART, MOB-AIS and EPIRB-AIS under test). 15: 'Not defined'} ship_types = { 0: 'Not available', 1: 'Reserved for future use', 2: 'Reserved for future use', 3: 'Reserved for future use', 4: 'Reserved for future use', 5: 'Reserved for future use', 6: 'Reserved for future use', 7: 'Reserved for future use', 8: 'Reserved for future use', 9: 'Reserved for future use', 10: 'Reserved for future use', 11: 'Reserved for future use', 12: 'Reserved for future use', 13: 'Reserved for future use', 14: 'Reserved for future use', 15: 'Reserved for future use', 16: 'Reserved for future use', 17: 'Reserved for future use', 18: 'Reserved for future use', 19: 'Reserved for future use', 20: 'Wing in ground (WIG), all ships of this type', 21: 'Wing in ground (WIG), Hazardous category A', 22: 'Wing in ground (WIG), Hazardous category B', 23: 'Wing in ground (WIG), Hazardous category C', 24: 'Wing in ground (WIG), Hazardous category D', 25: 'Wing in ground (WIG), Reserved for future use', 26: 'Wing in ground (WIG), Reserved for future use', 27: 'Wing in ground (WIG), Reserved for future use', 28: 'Wing in ground (WIG), Reserved for future use', 29: 'Wing in ground (WIG), Reserved for future use', 30: 'Fishing', 31: 'Towing', 32: 'Towing: length exceeds 200m or breadth exceeds 25m', 33: 'Dredging or underwater ops', 34: 'Diving ops', 35: 'Military ops', 36: 'Sailing', 37: 'Pleasure Craft', 38: 'Reserved', 39: 'Reserved', 40: 'High speed craft (HSC), all ships of this type', 41: 'High speed craft (HSC), Hazardous category A', 42: 'High speed craft (HSC), Hazardous category B', 43: 'High speed craft (HSC), Hazardous category C', 44: 'High speed craft (HSC), Hazardous category D', 45: 'High speed craft (HSC), Reserved for future use', 46: 'High speed craft (HSC), Reserved for future use', 47: 'High speed craft (HSC), Reserved for future use', 48: 'High speed craft (HSC), Reserved for future use', 49: 'High speed craft (HSC), No additional information', 50: 'Pilot Vessel', 51: 'Search and Rescue vessel', 52: 'Tug', 53: 'Port Tender', 54: 'Anti-pollution equipment', 55: 'Law Enforcement', 56: 'Spare - Local Vessel', 57: 'Spare - Local Vessel', 58: 'Medical Transport', 59: 'Noncombatant ship according to RR Resolution No. 
18', 60: 'Passenger, all ships of this type', 61: 'Passenger, Hazardous category A', 62: 'Passenger, Hazardous category B', 63: 'Passenger, Hazardous category C', 64: 'Passenger, Hazardous category D', 65: 'Passenger, Reserved for future use', 66: 'Passenger, Reserved for future use', 67: 'Passenger, Reserved for future use', 68: 'Passenger, Reserved for future use', 69: 'Passenger, No additional information', 70: 'Cargo, all ships of this type', 71: 'Cargo, Hazardous category A', 72: 'Cargo, Hazardous category B', 73: 'Cargo, Hazardous category C', 74: 'Cargo, Hazardous category D', 75: 'Cargo, Reserved for future use', 76: 'Cargo, Reserved for future use', 77: 'Cargo, Reserved for future use', 78: 'Cargo, Reserved for future use', 79: 'Cargo, No additional information', 80: 'Tanker, all ships of this type', 81: 'Tanker, Hazardous category A', 82: 'Tanker, Hazardous category B', 83: 'Tanker, Hazardous category C', 84: 'Tanker, Hazardous category D', 85: 'Tanker, Reserved for future use', 86: 'Tanker, Reserved for future use', 87: 'Tanker, Reserved for future use', 88: 'Tanker, Reserved for future use', 89: 'Tanker, No additional information', 90: 'Other Type, all ships of this type', 91: 'Other Type, Hazardous category A', 92: 'Other Type, Hazardous category B', 93: 'Other Type, Hazardous category C', 94: 'Other Type, Hazardous category D', 95: 'Other Type, Reserved for future use', 96: 'Other Type, Reserved for future use', 97: 'Other Type, Reserved for future use', 98: 'Other Type, Reserved for future use', 99: 'Other Type, no additional information'} mangle = Mangler() libais-master/ais/io.py000066400000000000000000000102041344744644200153600ustar00rootroot00000000000000"""A file-like interface to NMEA parsing. >>> import ais >>> import json >>> with ais.open('test/data/typeexamples.nmea') as src: ... msg = next(src) ... print(json.dumps(next(msg), indent=4, sort_keys=True)) { "decoded": { "day": 0, "fix_type": 1, "hour": 24, "id": 4, "md5": "7ecb187e7edc1789de436b0c2ccf2963", "minute": 60, "mmsi": 3669713, "month": 0, "position_accuracy": 0, "raim": false, "repeat_indicator": 0, "second": 60, "slot_number": 2105, "slot_timeout": 2, "spare": 0, "sync_state": 1, "transmission_ctl": 0, "x": 181.0, "y": 91.0, "year": 0 }, "line_nums": [ 1 ], "line_type": "USCG", "lines": [ "!AIVDM,1,1,,A,403Ovl@000Htt[0-9A-Fa-f]{2})$') SENTENCE_START_RE = re.compile(r'^[\\$!]') # Types of NMEA lines. TEXT = 'TEXT' BARE = 'BARE' TAGB = 'TAGB' USCG = 'USCG' # Generally match line type. # TODO(schwehr): Add ^ to the beginning of each? Faster or slower? ID_BARE_RE = re.compile(r'[!$][A-Z]{4}[^*!$]*\*[0-9A-Fa-f]{2}$') ID_USCG_RE = re.compile(r'[!$][A-Z]{4}[^*!$]*\*[0-9A-Fa-f]{2},[^*!$]+$') ID_TAGB_RE = re.compile(r'\\([a-zA-Z]:[^*,\\]*[,]?)+\*[0-9A-Fa-f]{2}(\\.*)?$') # Specific match for an AIS line. ID_BARE_VDM_RE = re.compile(r'[!$][A-Z]{2}VD[MO][^*!$]*\*[0-9A-Fa-f]{2}') def _Checksum(sentence): """Compute the NMEA checksum for a payload.""" checksum = 0 for char in sentence: checksum ^= ord(char) checksum_str = '%02x' % checksum return checksum_str.upper() def Checksum(sentence): """Compute the checksum for a NMEA sentence or TAG BLOCK prefix. Args: sentence: str, NMEA tag block sentence starting with a backslash, bang, or dollar sign. The string must end with a star and the 2 character checksum. Returns: The computed checksum. """ sentence = SENTENCE_START_RE.sub('', sentence) sentence = CHECKSUM_RE.sub('', sentence) return _Checksum(sentence) def LineType(line): """Give the type of NMEA line this is. 
Args: line: str, Text to try to interpret. Returns: String that is one of 'TEXT', 'BARE', 'USCG', or 'TAGB'. """ if not line: return TEXT if line[0] in '!$': if ID_BARE_RE.match(line): return BARE elif ID_USCG_RE.match(line): return USCG return TEXT if line[0] == '\\' and ID_TAGB_RE.match(line): return TAGB return TEXT libais-master/ais/nmea_messages.py000066400000000000000000000272461344744644200175760ustar00rootroot00000000000000"""Parse non-AIS NMEA messages. National Marine Electronics Association (NMEA) messages are comma separated value lines of text that start with sender or "talker" code and the sentence type. They finish up with a checksum. For example, this is a time message (ZDA) from an "Integrated Navigation" (IN) system. This module is only concerned with the actual messages and not the wire level transfer of the data as defined in NMEA 0183 or NMEA 2000. Most users of libais will eventually want to parse the surround NMEA messages for time, AIS receiver status, station location, weather, etc. The core theme of libais is handling the AIVDM messages. However, leaving out the other NMEA messages means that a use of libais will have to write their own wrapper for other NMEA messages or bring in another library like GPSD. Very few decoders have support for messages like ABK, ABM, BBM, etc. that are often encountered with AIS logs. Additionally, the official NMEA specification is paywalled and public documentation covers only some of the messages. TODO(schwehr): Make sure this works with proprietary messages. TODO(schwehr): Factor our a generic handler for messages that do not need customization of the result. Should there be a handler parent class? TODO(schwehr): Consider using namedtuple rather than dict. See also: http://www.catb.org/gpsd/NMEA.html https://en.wikipedia.org/wiki/NMEA_0183 """ import datetime import logging import math import re from ais import util logger = logging.getLogger('libais') NMEA_HEADER_RE_STR = r'[$!](?P[A-Z][A-Z])' NMEA_SENTENCE_RE_STR = NMEA_HEADER_RE_STR + r'(?P[A-Z]{3,4}),' NMEA_CHECKSUM_RE_STR = r'\*(?P[0-9A-F][0-9A-F])' NMEA_HEADER_RE = re.compile(NMEA_HEADER_RE_STR) NMEA_SENTENCE_RE = re.compile(NMEA_SENTENCE_RE_STR) NMEA_CHECKSUM_RE = re.compile(NMEA_CHECKSUM_RE_STR) # TODO(schwehr): Rename TimeUtc. def TimeUtc(fields): seconds, fractional_seconds = FloatSplit(float(fields['seconds'])) microseconds = int(math.floor(fractional_seconds * 1e6)) fields['seconds'] = seconds fields['microseconds'] = microseconds fields['hours'] = util.MaybeToNumber(fields['hours']) fields['minutes'] = util.MaybeToNumber(fields['minutes']) when = datetime.time( fields['hours'], fields['minutes'], seconds, microseconds ) fields['when'] = when ABK_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PABK),' r'(?P\d+)?,' r'(?P[AB])?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+)' + NMEA_CHECKSUM_RE_STR ) ABK_RE = re.compile(ABK_RE_STR) # TODO(schwehr): Document that handlers return None if they fail to match # the line to their message. def HandleAbk(line): """Decode AIS Addressed and Binary Broadcast Acknowledgement (ABK).""" try: fields = ABK_RE.match(line).groupdict() except TypeError: return result = { 'message': 'ABK', 'talker': fields['talker'], 'chan': fields['chan'], } for field in ('mmsi', 'message_id', 'seq_num', 'ack_type'): result[field] = util.MaybeToNumber(fields[field]) return result ADS_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PADS),' r'(?P[^,]+?),' r'(?P(?P\d\d)(?P\d\d)(?P\d\d\.\d*))?,' r'(?P)[AV]?,' r'(?P\d)?,' r'(?P[EINS])?,' r'(?P[EIN])?' 
+ NMEA_CHECKSUM_RE_STR ) ADS_RE = re.compile(ADS_RE_STR) def HandleAds(line): """Decode Automatic Device Status (ADS).""" try: fields = ADS_RE.match(line).groupdict() except TypeError: return TimeUtc(fields) return { 'message': 'ADS', 'talker': fields['talker'], 'id': fields['id'], 'alarm': fields['alarm'], 'time_sync_method': util.MaybeToNumber(fields['time_sync_method']), 'pos_src': fields['pos_src'], 'time_src': fields['time_src'], 'when': fields['when'], } ALR_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PALR),' r'(?P(?P\d\d)(?P\d\d)(?P\d\d\.\d*))?,' r'(?P\d+)?,' r'(?P[AV]),' r'(?P[AV]),' r'(?P[^*]*)' + NMEA_CHECKSUM_RE_STR ) ALR_RE = re.compile(ALR_RE_STR) def HandleAlr(line): """Decode Set Alarm State (ALR).""" try: fields = ALR_RE.match(line).groupdict() except TypeError: return seconds, fractional_seconds = FloatSplit(float(fields['seconds'])) microseconds = int(math.floor(fractional_seconds * 1e6)) when = datetime.time( int(fields['hours']), int(fields['minutes']), seconds, microseconds ) result = { 'ack_state_raw': fields['ack_state'], 'condition_raw': fields['condition'], 'id': fields['id'], 'message': 'ALR', 'talker': fields['talker'], 'text': fields['text'], 'time': when, } if fields['ack_state'] in 'AV': result['ack_state'] = ('A' == fields['ack_state']) if fields['condition'] in 'AV': result['condition'] = ('A' == fields['condition']) return result BBM_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PBBM),' r'(?P\d),' r'(?P\d),' r'(?P\d),' r'(?P\d),' r'(?P\d),' r'(?P[^,*]*),' r'(?P\d)' + NMEA_CHECKSUM_RE_STR ) BBM_RE = re.compile(BBM_RE_STR) def HandleBbm(line): """Decode Binary Broadcast Message (BBM) sentence.""" try: fields = BBM_RE.match(line).groupdict() except TypeError: return result = { 'message': 'BBM', 'talker': fields['talker'], 'body': fields['body'], } for field in ('sen_tot', 'sen_num', 'seq_num', 'chan', 'message_id', 'fill_bits'): result[field] = util.MaybeToNumber(fields[field]) return result FSR_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PFSR),' r'(?P[^,]+)?,' r'(?P(?P\d\d)(?P\d\d)(?P\d\d(\.\d*)?))?,' r'(?P[A-Z])?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P[-]?\d+)?,' r'(?P\d+(\.d*)?)?' + NMEA_CHECKSUM_RE_STR ) FSR_RE = re.compile(FSR_RE_STR) def HandleFsr(line): try: fields = FSR_RE.match(line).groupdict() except TypeError: return seconds, fractional_seconds = FloatSplit(float(fields['seconds'])) microseconds = int(math.floor(fractional_seconds * 1e6)) when = datetime.time( int(fields['hours']), int(fields['minutes']), seconds, microseconds ) result = { 'message': 'FSR', 'id': fields['id'], 'chan': fields['chan'], 'time': when, } for field in ('slots_recv', 'slots_self', 'crc_fails', 'slots_reserved', 'slots_reserved_self', 'noise_db', 'slots_above_noise'): if fields[field] is not None and fields[field]: result[field] = util.MaybeToNumber(fields[field]) return result GGA_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PGGA),' r'(?P(?P\d\d)(?P\d\d)(?P\d\d\.\d*))?,' r'(?P(?P\d\d)(?P\d\d\.\d*))?,' r'(?P[NS])?,' r'(?P(?P\d{3})(?P\d\d\.\d*))?,' r'(?P[EW])?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+\.\d+)?,' r'(?P[+-]?\d+(\.\d+)?)?,' r'(?PM)?,' r'(?P[+-]?\d+(\.\d+)?)?,' r'(?PM)?,' r'(?P[A-Z0-9.]*)?,' r'(?P\d+)?' 
+ NMEA_CHECKSUM_RE_STR ) GGA_RE = re.compile(GGA_RE_STR) def HandleGga(line): try: fields = GGA_RE.match(line).groupdict() except TypeError: return seconds, fractional_seconds = FloatSplit(float(fields['seconds'])) microseconds = int(math.floor(fractional_seconds * 1e6)) when = datetime.time( int(fields['hours']), int(fields['minutes']), seconds, microseconds ) x = int(fields['lon_deg']) + float(fields['lon_min']) / 60.0 if fields['longitude_hemisphere'] == 'W': x = -x y = int(fields['lat_deg']) + float(fields['lat_min']) / 60.0 if fields['latitude_hemisphere'] == 'S': y = -y result = { 'message': 'GGA', 'time': when, 'longitude': x, 'latitude': y, } for field in ('gps_quality', 'satellites', 'hdop', 'antenna_height', 'antenna_height_units', 'geoidal_height', 'geoidal_height_units', 'differential_ref_station', 'differential_age_sec'): if fields[field] is not None and fields[field]: result[field] = util.MaybeToNumber(fields[field]) return result TXT_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PTXT),' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P\d+)?,' r'(?P[^*]*)?' + NMEA_CHECKSUM_RE_STR ) TXT_RE = re.compile(TXT_RE_STR) def HandleTxt(line): """Decode Text Transmission (TXT). TODO(schwehr): Handle encoded characters. e.g. ^21 is a '!'. Args: line: A string containing a NMEA TXT message. Returns: A dictionary with the decoded fields or None if it cannot decode the message. """ try: fields = TXT_RE.match(line).groupdict() except TypeError: return result = { 'message': 'TXT', 'talker': fields['talker'], 'text': fields['text'], } for field in ('sen_tot', 'sen_num', 'seq_num'): result[field] = util.MaybeToNumber(fields[field]) return result # Time in UTC. ZDA_RE_STR = ( NMEA_HEADER_RE_STR + r'(?PZDA),' r'(?P(?P\d\d)(?P\d\d)(?P\d\d(\.\d*)?))?,' r'(?P\d\d)?,' r'(?P\d\d)?,' r'(?P\d{4})?,' r'(?P[+-]?(\d+))?,' r'(?P(\d+))?' # ',?' + NMEA_CHECKSUM_RE_STR ) ZDA_RE = re.compile(ZDA_RE_STR) def FloatSplit(value): base = math.trunc(value) fractional = value - base return base, fractional def HandleZda(line): try: fields = ZDA_RE.match(line).groupdict() except TypeError: return for field in ('year', 'month', 'day', 'hours', 'minutes', 'zone_hours', 'zone_minutes'): if fields[field] is not None and fields[field]: fields[field] = util.MaybeToNumber(fields[field]) seconds, fractional_seconds = FloatSplit(float(fields['seconds'])) microseconds = int(math.floor(fractional_seconds * 1e6)) when = datetime.datetime( fields['year'], fields['month'], fields['day'], fields['hours'], fields['minutes'], seconds, microseconds) # TODO(schwehr): Convert this to Unix UTC seconds. return { 'message': 'ZDA', 'talker': fields['talker'], 'datetime': when, 'zone_hours': fields['zone_hours'], 'zone_minutes': fields['zone_minutes'], } HANDLERS = { 'ABK': HandleAbk, 'ADS': HandleAds, 'ALR': HandleAlr, 'BBM': HandleBbm, 'FSR': HandleFsr, 'GGA': HandleGga, 'TXT': HandleTxt, 'ZDA': HandleZda } def DecodeLine(line): """Decode a NMEA line. Args: line: A string with single line containing a possible NMEA sentence. Returns: A dict mapping the message and sentence fields or None if it is unable to decode the line. """ line = line.rstrip() try: sentence = NMEA_SENTENCE_RE.match(line).groupdict()['sentence'] except AttributeError: # Not NMEA. 
return if sentence not in HANDLERS: logger.info('skipping: %s', line) return try: message = HANDLERS[sentence](line) except AttributeError: logger.info('Unable to decode line with handle: %s', line) return return message libais-master/ais/nmea_queue.py000066400000000000000000000111331344744644200170770ustar00rootroot00000000000000"""Manage a stream of NMEA messages with optional metadata. TODO(schwehr): Add support for decoding non-AIS NMEA messages (e.g. ZDA). """ from ais import nmea from ais import tag_block from ais import uscg from ais import vdm import six.moves.queue as Queue class Error(Exception): pass def GetOrNone(queue): try: return queue.get(block=False) except Queue.Empty: return class NmeaQueue(Queue.Queue): # pylint: disable=line-too-long r"""Process mixed text, bare NMEA or NMEA with TAG BLOCK or USCG metadata. Raw text lines are passed through without a decode stage. "Bare" AIS NMEA lines are decoded and passed through without a decode or time field in resulting decodes. The USCG and TAG Block metadata messages will get a list of times and a "decoded" field. The decoded field contains the AIS message contents. The extra metadata is expected to be accessed through the matches field. Example usage: q = nmea_queue.NmeaQueue() q.qsize() # Returns 0 # Raw text. q.put('Hello world') q.qsize() # Returns 1 q.get() # Returns a dictionary with lines containing the input. q.qsize() # Returns 0 # Bare NMEA AIS VDM. q.put('!SAVDM,2,1,9,B,55NG9T000001L@GKCSPLDlTpV22222222222220t2PI5540Ht00`88888888,0*33') q.put('!SAVDM,2,2,9,B,88888888880,2*34') q.get() # Returns a dictionary with no times or extra metadata in matches. # USCG NMEA AIS VDM. q.put('!SAVDM,2,1,9,B,55NG9T000001L@GKCSPLDlTpV22222222222220t2PI5540Ht00`88888888,0*33,b003669709,1428944320') q.put('!SAVDM,2,2,9,B,88888888880,2*34,b003669709,1428944320') q.get() # Returns a dictionary with times and extra metadata. # TAG Block NMEA AIS VDM. q.put('\g:1-3-144865,n:325372,s:r11CSDO1,c:1428944580*6C\!AIVDM,2,1,9,A,55MkD8000001L@7?37L4AHDqAE8F0Pu8p8huLE8t28P,0*1E') q.put('\g:2-3-144865,n:325373*2D\!AIVDM,2,2,9,A,666v<04ThC`12AAkp0hH88888880,2*7A') q.get() # Return a dictionary with times and extra metadata. This sample line is a single line TAG Block position report intended for a satellite: \n:80677,s:b003669952,c:1428884269*2A\!SAVDM,1,1,,B,K8VSqb9LdU28WP8<,0*17 A queue get() returns: {'decoded': { 'cog': 131, 'gnss': True, 'id': 27, 'md5': '50898a3435865cf76f1b502b2821672b', 'mmsi': 577305000, 'nav_status': 5, 'position_accuracy': 1, 'raim': False, 'repeat_indicator': 0, 'sog': 0, 'spare': 0, 'x': -90.2066650390625, 'y': 29.145000457763672}, 'line_nums': [9], 'line_type': ('TAGB',), 'lines': [ '\\n:80677,s:b003669952,c:1428884269*2A' '\\!SAVDM,1,1,,B,K8VSqb9LdU28WP8<,0*17'], 'matches': [{ 'dest': None, 'group': None, 'group_id': None, 'line_num': '80677', 'metadata': 'n:80677,s:b003669952,c:1428884269*2A', 'payload': '!SAVDM,1,1,,B,K8VSqb9LdU28WP8<,0*17', 'quality': None, 'rcvr': 'b003669952', 'rel_time': None, 'sentence_num': None, 'sentence_tot': None, 'tag_checksum': '2A', 'text': None, 'text_date': None, 'time': '1428884269'}], 'times': [1428884269]}) """ def __init__(self): self.bare_queue = vdm.BareQueue() self.tagb_queue = tag_block.TagQueue() self.uscg_queue = uscg.UscgQueue() self.line_num = 0 Queue.Queue.__init__(self) def put(self, line, line_num=None): """Add a line to the queue. Args: line: str, Text to add to the queue. line_num: int, Optional line number override. 
""" if line_num is not None: self.line_num = line_num else: self.line_num += 1 line_num = self.line_num line = line.rstrip() line_type = nmea.LineType(line) msg = None if line_type == nmea.TEXT: msg = {'line_nums': [self.line_num], 'lines': [line]} elif line_type == nmea.BARE: self.bare_queue.put(line, self.line_num) msg = GetOrNone(self.bare_queue) elif line_type == nmea.TAGB: self.tagb_queue.put(line, self.line_num) msg = GetOrNone(self.tagb_queue) elif line_type == nmea.USCG: self.uscg_queue.put(line, self.line_num) msg = GetOrNone(self.uscg_queue) else: assert False # Should never reach here. if msg: msg['line_type'] = line_type Queue.Queue.put(self, msg) def GetOrNone(self): try: return self.get(block=False) except Queue.Empty: return libais-master/ais/stats.py000066400000000000000000000054311344744644200161150ustar00rootroot00000000000000#!/usr/bin/env python import argparse import datetime import collections import logging import pprint import sys from ais import nmea_queue logger = logging.getLogger('libais') class TrackRange(object): def __init__(self): self.min = None self.max = None def AddValues(self, *values): print ('AddValues', values) values = [v for v in values if v is not None] print ('AV3 ', values, self.min, self.max) if not len(values): raise ValueError('Must specify at least 1 value.') if self.min is None: self.min = min(values) self.max = max(values) return self.min = min(self.min, *values) self.max = max(self.max, *values) class Stats(object): def __init__(self): self.counts = collections.Counter() self.queue = nmea_queue.NmeaQueue() self.time_range = TrackRange() self.time_delta_range = TrackRange() def AddFile(self, iterable, filename=None): self.counts['files'] += 1 for line in iterable: self.AddLine(line) def AddLine(self, line): print(line.rstrip()) self.counts['lines'] += 1 self.queue.put(line) msg = self.queue.GetOrNone() if not msg: return # logging.info('stats found msg: %s', msg) # print () # pprint.pprint(msg) self.counts[msg['line_type']] += 1 if 'decoded' in msg: decoded = msg['decoded'] if 'id' in decoded: self.counts['msg_VDM_%s' % decoded['id']] += 1 if 'msg' in decoded: self.counts['msg_%s' % decoded['msg']] += 1 if 'times' in msg: times = [t for t in msg['times'] if t is not None] if times: if self.time_range.min is None: self.time_range.AddValues(*times) # self.time_delta_range.AddValues(msg['times']) else: # print (self.time_range.min, self.time_range.max) time_delta = max(times) - self.time_range.max self.time_delta_range.AddValues(time_delta) self.time_range.AddValues(*times) def PrintSummary(self): pprint.pprint(self.counts) logger.info('time_range: [%s to %s]', self.time_range.min, self.time_range.max) logger.info('%s', datetime.datetime.utcfromtimestamp(self.time_range.min)) logger.info('%s', datetime.datetime.utcfromtimestamp(self.time_range.max)) logger.info('time_delta_range: [%s to %s]', self.time_delta_range.min, self.time_delta_range.max) def main(): logger.setLevel(logging.INFO) logger.info('in main') parser = argparse.ArgumentParser() parser.add_argument('filenames', type=str, nargs='+', help='NMEA files') args = parser.parse_args() logger.info('args: %s', args) stats = Stats() for filename in args.filenames: stats.AddFile(open(filename), filename) stats.PrintSummary() libais-master/ais/stream/000077500000000000000000000000001344744644200156755ustar00rootroot00000000000000libais-master/ais/stream/__init__.py000066400000000000000000000253561344744644200200210ustar00rootroot00000000000000# Based on ais_normalize.py from 
https://github.com/schwehr/noaadata import sys import traceback import warnings import ais from ais.stream import checksum warnings.warn( "The stream module is deprecated and will be removed in 1.0", FutureWarning, stacklevel=2 ) def ErrorPrinter(e, stats, verbose=False, max_errors=None, # In % of total number of input lines **kw): if max_errors != None and float(stats["error_num_total"]) / float(stats["line_num"]) * 100.0 > max_errors: raise TooManyErrorsError(**stats) if verbose: sys.stderr.write('%s\n' % e) # def ErrorRaiser(e, stats, **kw): # raise e class StreamError(Exception): description = 'Stream error' def __init__(self, **kw): self.kw = kw self.kw['description'] = self.description def __str__(self): return '%(description)s: %(line_num)s: %(line)s' % self.kw class InvalidChecksumError(StreamError): description = 'Invalid checksum' class InvalidChecksumInConstructedError(StreamError): description = 'Invalid checksum in constructed one-liner' class NoStationFoundError(StreamError): description = 'No station found' class TooFewFieldsError(StreamError): description = 'Too few fields' def __str__(self): return '%(description)s, got %(fields)s but needed 6: %(line_num)s: %(line)s' % self.kw class MissingTimestampsError(StreamError): description = 'Timestamps missing' def __str__(self): return '%(description)s: %(line_num)s: %(line)s, parts: %(parts)s' % self.kw class DifferingTimestampsError(StreamError): description = 'Timestamps not all the same' def __str__(self): return '%(description)s for %(timestamp)s: %(line_num)s: %(line)s, parts: %(parts)s' % self.kw class OnlyMessageEndError(StreamError): description = 'Do not have the preceeding packets for' def __str__(self): return '%(description)s for %(bufferSlot)s:\n%(line)s\n' % self.kw class UnfinishedMessagesError(StreamError): description = 'Unfinished messages at end of file' def __str__(self): return '%(description)s:\n%(buffers)s\n' % self.kw class TooManyErrorsError(StreamError): description = 'Too many errors' def __str__(self): res = dict(self.kw) res['error_lines'] = "" if 'error_num' in res: res['error_lines'] = "\n" + "\n".join(" %s: %s" % (error, num) for error, num in res['error_num'].iteritems()) return '%(description)s: %(error_num_total)s errors in %(line_num)s lines:%(error_lines)s' % res def parseTagBlock(line): if not line.startswith("\\"): return {}, line tagblock, line = line[1:].split("\\", 1) tagblock, checksum = tagblock.rsplit("*", 1) tags = {} for field in tagblock.split(","): key, value = field.split(":") if key == 'c': key = 'timestamp' value = int(value) # This can be either seconds or milliseconds... 
if value > 40000000000: value = value / 1000.0 elif key == 'n': key = 'line_count' value = int(value) elif key == 'r': key = 'relative_time' value = int(value) elif key == 'd': key = 'destination' elif key == 's': key = 'station' elif key == 't': key = 'text' elif key == 'g': key = 'group' value = dict(zip(["sentence", "groupsize", "id"], [int(part) for part in value.split("-")])) tags["tagblock_" + key] = value return tags, line def add_error_to_stats(e, stats): if "error_num_total" not in stats: stats["error_num_total"] = 0 stats["error_num_total"] += 1 if "error_num" not in stats: stats["error_num"] = {} name = getattr(e, "description", getattr(e, "message", str(type(e)))) if name not in stats["error_num"]: stats["error_num"][name] = 0 stats["error_num"][name] += 1 def normalize(nmea=sys.stdin, uscg=True, validate_checksum=True, allow_unknown=False, window=2, ignore_tagblock_station=False, treat_ab_equal=False, pass_invalid_checksums=False, allow_missing_timestamps=False, errorcb=ErrorPrinter, stats=None, **kw): """Assemble multi-line messages Args: nmea: iterator of lines to read window: number of seconds to allow the later parts of a multiline message to span """ buffers = {} # Put partial messages in a queue by station so that they can be reassembled if stats is None: stats={} stats['line_num'] = stats.pop('line_num', 0) stats['error_num_total'] = stats.pop('error_num_total', 0) stats['invalid_checksums'] = stats.pop('invalid_checksums', 0) def report_error(e): add_error_to_stats(e, stats) errorcb(e, stats, **kw) if not uscg: report_error('Need to make a faster version that does not worry about the extra args and stations dict') assert False for idx, origline in enumerate(nmea): try: tagblock, line = parseTagBlock(origline) line = line.strip() + '\n' # Get rid of DOS issues. stats["line_num"] += 1 if len(line) < 7 or line[3:6] not in ('VDM', 'VDO'): yield tagblock, line, origline continue if validate_checksum and not checksum.isChecksumValid(line): stats["invalid_checksums"] += 1 report_error(InvalidChecksumError(line_num=stats["line_num"], line=line.strip())) if not pass_invalid_checksums: continue fields = line.split(',') if len(fields) < 6: report_error(TooFewFieldsError(line_num=stats["line_num"], line=line.strip(), fields=len(fields))) continue # Total NMEA lines that compose this message [1..9]. totNumSentences = int(fields[1]) if 1 == totNumSentences: # A single line needs no work, so pass it along. yield tagblock, line, origline continue sentenceNum = int(fields[2]) # Message sequence number 1..9 (packetNum) payload = fields[5] # AIS binary data encoded in whacky ways timestamp = fields[-1].strip() # Seconds since Epoch UTC. Always the last field station = None # USCG Receive Stations # if None==station: for i in range(len(fields)-1, 5, -1): if len(fields[i]) and fields[i][0] in ('r', 'b'): station = fields[i] break # Found it so ditch the for loop. 
            if ignore_tagblock_station:
                tagblock_station = None
            else:
                tagblock_station = tagblock.get('tagblock_station', None)

            if station is None and allow_unknown:
                station = 'UNKNOWN'
            if station is None and tagblock_station is None:
                report_error(NoStationFoundError(line_num=stats["line_num"],
                                                 line=line.strip()))
                continue

            bufferSlot = (tagblock_station, station, fields[3])  # seqId and Channel make a unique stream
            if not treat_ab_equal:
                bufferSlot += (fields[4],)  # channel id

            newPacket = {"payload": payload,
                         "timestamp": timestamp,
                         "tagblock": tagblock,
                         "origline": origline}
            if sentenceNum == 1:
                buffers[bufferSlot] = [newPacket]  # Overwrite any partials
                continue

            if totNumSentences == sentenceNum:  # Finished a message
                if bufferSlot not in buffers:
                    report_error(OnlyMessageEndError(line=line,
                                                     bufferSlot=bufferSlot))
                    continue
                buffers[bufferSlot].append(newPacket)
                parts = buffers[bufferSlot]  # Now have all the pieces.
                del buffers[bufferSlot]  # Clear out the used packets to save memory.

                # Sanity check
                ok = True
                ts1 = None
                for part in parts:
                    try:
                        ts1 = float(part['timestamp'])
                        ts2 = float(timestamp)
                    except ValueError:
                        try:
                            ts1 = float(part['tagblock']['tagblock_timestamp'])
                            ts2 = float(tagblock['tagblock_timestamp'])
                        except (KeyError, TypeError, ValueError):
                            if allow_missing_timestamps:
                                ts1 = 0
                                ts2 = 0
                            else:
                                report_error(MissingTimestampsError(
                                    line_num=stats["line_num"],
                                    line=line.strip(),
                                    timestamp=timestamp,
                                    parts=parts))
                                ok = False
                                break

                    if ts1 > ts2 + window or ts1 < ts2 - window:
                        report_error(DifferingTimestampsError(
                            line_num=stats["line_num"],
                            line=line.strip(),
                            timestamp=timestamp,
                            parts=parts))
                        ok = False
                        break
                if not ok:
                    continue

                payload = ''.join([p['payload'] for p in parts])
                tagblock = {}
                for p in reversed(parts):
                    tagblock.update(p['tagblock'])

                # Try to mirror the packet as much as possible... same seqId and channel.
                checksumed_str = ','.join((fields[0], '1,1', fields[3], fields[4],
                                           payload, fields[6].split('*')[0] + '*'))
                if ts1 == 0:  # allowed missing timestamp and it is missing
                    if len(fields[7:-1]) == 0:
                        out_str = checksumed_str + checksum.checksumStr(checksumed_str)
                    else:
                        out_str = (checksumed_str + checksum.checksumStr(checksumed_str) +
                                   ',' + ','.join(fields[7:-1]))
                else:
                    out_str = (checksumed_str + checksum.checksumStr(checksumed_str) +
                               ',' + ','.join(fields[7:]))

                if not checksum.isChecksumValid(out_str):
                    report_error(InvalidChecksumInConstructedError(
                        line_num=stats["line_num"], line=line.strip()))

                out_str = out_str.strip() + '\n'  # FIX: Why do I have to do this last strip?
                origstr = ''.join([p['origline'] for p in parts])

                yield tagblock, out_str, origstr
                continue

            buffers[bufferSlot].append(newPacket)

        except TooManyErrorsError:
            raise
        except Exception as inst:
            report_error(inst)

    if buffers:
        report_error(UnfinishedMessagesError(buffers=buffers))


def decode(nmea=sys.stdin, errorcb=ErrorPrinter, keep_nmea=False, stats=None,
           **kw):
    """Decodes a stream of AIS messages.
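
    A minimal usage sketch (the file name here is hypothetical; the 'nmea' key
    is only present when keep_nmea=True):

        with open('nmea.log') as handle:
            for msg in decode(handle, keep_nmea=True):
                print(msg.get('mmsi'), msg['nmea'])
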
    Takes the same arguments as normalize."""
    if stats is None:
        stats = {}

    def report_error(e):
        add_error_to_stats(e, stats)
        errorcb(e, stats, **kw)

    for tagblock, line, origline in normalize(nmea=nmea, errorcb=errorcb,
                                              stats=stats, **kw):
        try:
            body = line.split(',')[5]
            pad = int(line.split('*')[0][-1])
            res = ais.decode(body, pad)
            res.update(tagblock)
            if keep_nmea:
                res['nmea'] = origline
            yield res
        except TooManyErrorsError:
            raise
        except Exception as e:
            report_error(e)

libais-master/ais/stream/checksum.py

"""Utilities for working with NMEA strings."""

import re
import sys
import time

nmeaChecksumRegExStr = r"""\,[0-9]\*[0-9A-F][0-9A-F]"""
nmeaChecksumRE = re.compile(nmeaChecksumRegExStr)


def checksumStr(data):
    """Take a NMEA 0183 string and compute the checksum.

    @param data: NMEA message.  Leading ?/! and trailing checksum are optional
    @type data: str
    @return: hexadecimal value
    @rtype: str

    Checksum is calculated by xor'ing everything between ? or ! and the *

    >>> checksumStr("!AIVDM,1,1,,B,35MsUdPOh8JwI:0HUwquiIFH21>i,0*09")
    '09'
    >>> checksumStr("AIVDM,1,1,,B,35MsUdPOh8JwI:0HUwquiIFH21>i,0")
    '09'
    """
    # FIX: strip off new line at the end too
    if data[0] == '!' or data[0] == '?':
        data = data[1:]
    if data[-1] == '*':
        data = data[:-1]
    if data[-3] == '*':
        data = data[:-3]

    # FIX: rename sum to not shadow the builtin function
    checksum = 0
    for c in data:
        checksum = checksum ^ ord(c)
    sum_hex = "%x" % checksum
    if len(sum_hex) == 1:
        sum_hex = '0' + sum_hex
    return sum_hex.upper()


def isChecksumValid(nmeaStr, allowTailData=True):
    """Return True if the string checks out with the checksum.

    @param allowTailData: Permit handling of Coast Guard format with data after the checksum
    @param nmeaStr: NMEA message.  Leading ?/! are optional
    @type nmeaStr: str
    @return: True if the checksum matches
    @rtype: bool

    >>> isChecksumValid("!AIVDM,1,1,,B,35MsUdPOh8JwI:0HUwquiIFH21>i,0*09")
    True

    Corrupted:

    >>> isChecksumValid("!AIVDM,11,1,,B,35MsUdPOh8JwI:0HUwquiIFH21>i,0*09")
    False
    """
    if allowTailData:
        match = nmeaChecksumRE.search(nmeaStr)
        if not match:
            return False
        nmeaStr = nmeaStr[:match.end()]

    if nmeaStr[-3] != '*':
        return False  # Bad string without proper checksum.
    checksum = nmeaStr[-2:]
    if checksum.upper() == checksumStr(nmeaStr).upper():
        return True
    return False

libais-master/ais/tag_block.py

"""Manage a stream of NMEA TAG block messages.

The TAG (Transport, Annotate, and Group) Block messages are defined in
NMEA 4.0 section 7.

Some providers violate the NMEA standard by using lower case letters in
NMEA checksums.  This module forces checksums to always be upper case.

TODO(schwehr): Add a queue method to drop old groups caches.
TODO(schwehr): Catch a wider variety of incomplete groups.
TODO(schwehr): Compute running stats in the queue.
"""

import hashlib
import logging
import re

import six
import six.moves.queue as Queue

import ais
from ais import nmea
from ais import nmea_messages
from ais import util
from ais import vdm

logger = logging.getLogger('libais')

# Added a decimal value to time beyond the normal TAG BLOCK spec.
TAG_BLOCK_RE = re.compile(r"""
(\\
(?P(
  ( c:(?P