pax_global_header00006660000000000000000000000064145763453650014534gustar00rootroot0000000000000052 comment=036f726adc44aebdbd0e6009364697fcc445f9b6 ocaml-ffmpeg-1.1.11/000077500000000000000000000000001457634536500141525ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/.github/000077500000000000000000000000001457634536500155125ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/.github/workflows/000077500000000000000000000000001457634536500175475ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/.github/workflows/ci.yml000066400000000000000000000005351457634536500206700ustar00rootroot00000000000000name: CI on: [push] concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true jobs: build: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [macos-latest, ubuntu-latest] steps: - name: Build and test module uses: savonet/build-and-test-ocaml-module@main ocaml-ffmpeg-1.1.11/.github/workflows/doc.yml000066400000000000000000000011621457634536500210370ustar00rootroot00000000000000name: Doc build on: push: branches: - main jobs: build_doc: runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v1 - name: Setup OCaml uses: ocaml/setup-ocaml@v2 with: ocaml-compiler: 4.14.x - name: Pin locally run: opam pin -y add --no-action . - name: Install locally run: opam install -y --with-doc ffmpeg - name: Build doc run: opam exec dune build @doc - name: Deploy doc uses: JamesIves/github-pages-deploy-action@4.1.4 with: branch: gh-pages folder: _build/default/_doc/_html ocaml-ffmpeg-1.1.11/.gitignore000066400000000000000000000000421457634536500161360ustar00rootroot00000000000000_build/ *.install .*sw* .merlin *~ocaml-ffmpeg-1.1.11/.gitmodules000066400000000000000000000001031457634536500163210ustar00rootroot00000000000000[submodule "m4"] path = m4 url = git://github.com/savonet/m4.git ocaml-ffmpeg-1.1.11/.ocamlformat000066400000000000000000000003371457634536500164620ustar00rootroot00000000000000version=0.25.1 profile = conventional break-separators = after space-around-lists = false doc-comments = before match-indent = 2 match-indent-nested = always parens-ite exp-grouping = preserve module-item-spacing = compact ocaml-ffmpeg-1.1.11/CHANGES000066400000000000000000000062741457634536500151560ustar00rootroot000000000000001.1.11 (2024-03-19) ====== * Fix segfault with the new OCaml 5 concurrent GC. 1.1.10 (2024-01-17) ====== * Fixed codec attribute for AAC. * Added non-interleaved API for Av. 1.1.9 (2023-10-27) ===== * Added support for AV_CODEC_ID_WRAPPED_AVFRAME as valid audio/video codec. * Added AV_CODEC_ID_NONE to all codec id classes to prevent unecessary failures when passing it as detected coded_id. 1.1.8 (2023-07-01) ===== * Added asynchronous stream copy creation API. * Added `pkt_dts` API for AVFrame. * Raise exception when re-using a closed container. * Move caml_remove_generational_global_root to ocaml finalizers to follow OCaml 5 memory model. * Added `Avutil.Frame.{duration, set_duration}` * Reset metadata before setting new value. * Fix min/max filter option values for int and int64 type. * Add flush API to avfilter. * Add support for data streams. 1.1.7 (2023-02-08) ===== * Filter `NaN` when converting floats to and from OCaml in `swresample`. 1.1.6 (2022-09-26) ====== * Fix channel layout breakage with recent FFmpeg with new channel layout API. * Fix memory leak with streams. * Cleanup internal C binding. 1.1.5 (2022-08-28) ====== * Fix segfault when flushing an encoder that hasn't been started yet. * Added flac to codec_attr. 
1.1.4 (2022-06-18) ===== * Added codec descriptor API. * Added bitstream filters API * Fixed issues with unimplements AvOptions types. * Fixed memory cleanup when raising exception in `open_input` (#55) 1.1.3 (2022-04-06) ===== * Use size of memory referred by AVFrame and AVPacket when allocating OCaml frame custom block to make sure the GC is diligent when cleaning up OCaml values referring to AVFrame and AVPacket. 1.1.2 (2022-03-13) ===== * Change error returned on OCaml callback exception to AVERROR_EXTERNAL, add log entry. * Get rid of naked pointers. * Adapt to most recent FFmpeg API. * Added offset/length to conversion functions. 1.1.1 (2021-12-26) ====== * Added support for interruption callback. 1.1.0 (2021-11-27) ===== * Added support for filter commands. * Make [Av.get_pixel_aspect] optional. * Rename [Avfilter.sample_aspect_ratio] into [Avfilter.pixel_aspect], make it optional. 1.0.2 (2021-11-19) ===== * Fix deadlock when raising exceptions in callbacks. 1.0.1 (2021-10-02) ===== * Fixed segfault when using `Field(v, i) = v'` instead of `Store_field(v, i, v')` * Fixed memory leak when avio callback raised exception. 1.0.0 (2021-09-14) ===== * Switch to dune * Heavy rework of the whole API 0.4.2 (2020-07-04) ===== * Adapt to new 4.3 headers. 0.4.1 (2019-09-26) ===== * Added Avutil.Audio.frame_get_sample_format. 0.4.0 (09-22-2019) ===== * Added support for stream output. * Updated stream and container creation API to be able to use any arbitrary FFmpeg parameter. 0.3.0 (08-22-2019) ===== * Implement support for streams * Cleanup internal logic * Fix build on bytecode architecture (#27) 0.2.1 (11-12-2018) ===== * Fix dist. 0.2.0 (09-09-2018) ===== * Add support for demuxing and resampling (#2). * Add audio and video encoding. * Add avdevice. * Add codec parameters structure. 0.1.2 (05-23-2016) ===== * Compatibility with the latest ffmpeg. 0.1.1 (03-08-2015) ===== * Dummy github release 0.1.0 (18-02-2013) ===== * Initial release. ocaml-ffmpeg-1.1.11/COPYING000066400000000000000000000634761457634536500152250ustar00rootroot00000000000000 GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. 
A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. 
You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. 
You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. 
Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. 
However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Libraries If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License). To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Also add information on how to contact you by electronic and paper mail. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker. <signature of Ty Coon>, 1 April 1990 Ty Coon, President of Vice That's all there is to it!
ocaml-ffmpeg-1.1.11/README.md000066400000000000000000000101361457634536500154320ustar00rootroot00000000000000
ocaml-ffmpeg
============

![GitHub](https://img.shields.io/github/license/savonet/ocaml-ffmpeg) ![CI](https://github.com/savonet/ocaml-ffmpeg/workflows/CI/badge.svg) ![GitHub release (latest by date)](https://img.shields.io/github/v/release/savonet/ocaml-ffmpeg)

ocaml-ffmpeg is an OCaml interface for the [FFmpeg](http://ffmpeg.org/) Multimedia framework.

The modules currently available are:

`Avutil` : base module containing the shared types and utilities

`Avcodec` : the module containing decoders and encoders for audio, video and subtitle codecs.

`Av` : the module containing demuxers and muxers for reading and writing multimedia container formats.

`Avdevice` : the module containing input and output devices for grabbing from and rendering to many common multimedia input/output software frameworks.

`Avfilter` : the module containing audio and video filters.

`Swresample` : the module performing audio resampling, rematrixing and sample format conversion operations.

`Swscale` : the module performing image scaling and color space/pixel format conversion operations.

Please read the COPYING file before using this software.

Documentation:
=============

The [API documentation is available here](http://www.liquidsoap.info/ocaml-ffmpeg/).

Prerequisites:
==============

- ocaml
- FFmpeg
- dune
- findlib

See the [dune-project](dune-project) file for versions.

Installation:
=============

The preferred installation method is via [opam](http://opam.ocaml.org/):

```
opam install ffmpeg
```

This will install the latest release of all ffmpeg-related modules. You can also install individual modules, for instance:

```
opam install ffmpeg-avcodec ffmpeg-avfilter
```

If you wish to install the latest code from this repository, you can do:

```
opam install .
```

from within this repository.

Compilation:
============

```
dune build
```

Examples:
=============

The [audio_decoding](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/audio_decoding.ml) example shows how to read frames from an audio file and convert them into bytes.

The [audio_device](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/audio_device.ml) example shows how to read 500 audio frames from an input audio device or a URL and write them into an output audio device or a file.

The [decode_audio](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/decode_audio.ml) example shows how to parse packets from a mapped file, decode them and write the resulting frames into a file.

The [demuxing_decoding](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/demuxing_decoding.ml) example shows how to demux and decode audio, video and subtitle frames from a file, convert them into bytes and write them to raw files.
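For a quick feel of the `Av` API these examples build on, here is a minimal, illustrative sketch (it is not one of the repository's examples): it opens an input, selects the best audio stream and drains its decoded frames. The file name `input.mp3` is a placeholder, and all error handling other than end-of-file is omitted.

```
(* Illustrative sketch only: open a media file, pick the best audio stream
   and count the decoded audio frames. The input file name is a placeholder. *)
let () =
  let src = Av.open_input "input.mp3" in
  let _index, stream, _params = Av.find_best_audio_stream src in
  let rec drain count =
    match Av.read_input ~audio_frame:[stream] src with
    | `Audio_frame (_, _frame) -> drain (count + 1)
    | _ -> drain count
    | exception Avutil.Error `Eof -> count
  in
  Printf.printf "Decoded %d audio frames\n" (drain 0);
  Av.close src
```

The linked examples flesh this flow out, e.g. converting the decoded frames to bytes and handling errors.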
The [encode_audio](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/encode_audio.ml) example shows how to convert a float array into stereo frames and encode them into packets.

The [encode_video](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/encode_video.ml) example shows how to create video frames and write them encoded into a file.

The [encoding](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/encoding.ml) example shows how to create a multimedia file with audio and video streams.

The [player](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/player.ml) example shows how to read a multimedia file and write audio and video frames to output devices.

The [remuxing](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/remuxing.ml) example shows how to remux multimedia file packets without decoding them.

The [transcode_aac](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/transcode_aac.ml) example shows how to transcode an audio file into an AAC audio file.

The [transcoding](https://github.com/savonet/ocaml-ffmpeg/blob/master/examples/transcoding.ml) example shows how to transcode audio streams into the AAC codec, video streams into the H264 codec and write them to an output file.

Author:
=======

The author of this software may be contacted by electronic mail at the following address: savonet-users@lists.sourceforge.net.
ocaml-ffmpeg-1.1.11/av/000077500000000000000000000000001457634536500145605ustar00rootroot00000000000000
ocaml-ffmpeg-1.1.11/av/av.ml000066400000000000000000000320761457634536500155300ustar00rootroot00000000000000
open Avutil external init : unit -> unit = "ocaml_av_init" [@@noalloc] let () = init () external container_options : unit -> Options.t = "ocaml_av_container_options" let container_options = container_options () (* Format *) module Format = struct external get_input_name : (input, _) format -> string = "ocaml_av_input_format_get_name" external get_input_long_name : (input, _) format -> string = "ocaml_av_input_format_get_long_name" external find_input_format : string -> (input, 'a) format = "ocaml_av_find_input_format" let find_input_format name = try Some (find_input_format name) with Not_found -> None external get_output_name : (output, _) format -> string = "ocaml_av_output_format_get_name" external get_output_long_name : (output, _) format -> string = "ocaml_av_output_format_get_long_name" external get_audio_codec_id : (output, audio) format -> Avcodec.Audio.id = "ocaml_av_output_format_get_audio_codec_id" external get_video_codec_id : (output, video) format -> Avcodec.Video.id = "ocaml_av_output_format_get_video_codec_id" external get_subtitle_codec_id : (output, subtitle) format -> Avcodec.Subtitle.id = "ocaml_av_output_format_get_subtitle_codec_id" external guess_output_format : string -> string -> string -> (output, 'a) format option = "ocaml_av_output_format_guess" let guess_output_format ?(short_name = "") ?(filename = "") ?(mime = "") () = guess_output_format short_name filename mime end external ocaml_av_cleanup_av : _ container -> unit = "ocaml_av_cleanup_av" (* Input *) external open_input : string -> (input, _) format option -> (unit -> bool) option -> (string * string) array -> input container * string array = "ocaml_av_open_input" let open_input ?interrupt ?format ?opts url = let opts = opts_default opts in let ret, unused = open_input url format interrupt (mk_opts_array opts) in filter_opts unused opts; Gc.finalise ocaml_av_cleanup_av ret; ret type avio type read = bytes -> int
-> int -> int type write = bytes -> int -> int -> int type _seek = int -> int -> int type seek = int -> Unix.seek_command -> int let seek_of_int = function | 0 -> Unix.SEEK_SET | 1 -> Unix.SEEK_CUR | 2 -> Unix.SEEK_END | _ -> assert false external ocaml_av_create_io : int -> read option -> write option -> _seek option -> avio = "ocaml_av_create_io" let _seek_of_seek = function | None -> None | Some fn -> Some (fun a m -> fn a (seek_of_int m)) let ocaml_av_create_read_io len ?seek read = ocaml_av_create_io len (Some read) None (_seek_of_seek seek) external ocaml_av_open_input_stream : avio -> (input, _) format option -> (string * string) array -> input container * string array = "ocaml_av_open_input_stream" let ocaml_av_open_input_stream ?format ?opts avio = let opts = opts_default opts in let ret, unused = ocaml_av_open_input_stream avio format (mk_opts_array opts) in filter_opts unused opts; Gc.finalise ocaml_av_cleanup_av ret; ret external caml_av_input_io_finalise : avio -> unit = "caml_av_input_io_finalise" let open_input_stream ?format ?opts ?seek read = let avio = ocaml_av_create_read_io 4096 ?seek read in let cleanup () = caml_av_input_io_finalise avio in let input = ocaml_av_open_input_stream ?format ?opts avio in Gc.finalise_last cleanup input; input external _get_duration : input container -> int -> Time_format.t -> Int64.t = "ocaml_av_get_duration" let get_input_duration ?(format = `Second) i = match _get_duration i (-1) format with 0L -> None | d -> Some d external _get_metadata : input container -> int -> (string * string) list = "ocaml_av_get_metadata" let get_input_metadata i = List.rev (_get_metadata i (-1)) external input_obj : input container -> 'a = "ocaml_av_input_obj" let input_obj c = Obj.magic (input_obj c, c) (* Input Stream *) type ('a, 'b, 'c) stream = { container : 'a container; index : int; mutable decoder : ('b, Avcodec.decode) Avcodec.codec option; } type media_type = MT_audio | MT_video | MT_data | MT_subtitle let mk_stream container index = { container; index; decoder = None } external get_codec_params : (_, 'm, _) stream -> 'm Avcodec.params = "ocaml_av_get_stream_codec_parameters" external get_time_base : (_, _, _) stream -> Avutil.rational = "ocaml_av_get_stream_time_base" external set_time_base : (_, _, _) stream -> Avutil.rational -> unit = "ocaml_av_set_stream_time_base" external get_frame_size : (_, audio, _) stream -> int = "ocaml_av_get_stream_frame_size" external get_pixel_aspect : (_, video, _) stream -> Avutil.rational option = "ocaml_av_get_stream_pixel_aspect" external _get_streams : input container -> media_type -> int list = "ocaml_av_get_streams" let get_streams input media_type = _get_streams input media_type |> List.rev_map (fun i -> let s = mk_stream input i in (i, s, get_codec_params s)) let get_audio_streams input = get_streams input MT_audio let get_video_streams input = get_streams input MT_video let get_subtitle_streams input = get_streams input MT_subtitle let get_data_streams input = get_streams input MT_data let set_decoder s decoder = s.decoder <- Some decoder external _find_best_stream : input container -> media_type -> int = "ocaml_av_find_best_stream" let find_best_stream c t = let i = _find_best_stream c t in let s = mk_stream c i in (i, s, get_codec_params s) let find_best_audio_stream c = find_best_stream c MT_audio let find_best_video_stream c = find_best_stream c MT_video let find_best_subtitle_stream c = find_best_stream c MT_subtitle let get_input s = s.container let get_index s = s.index let get_duration ?(format = 
`Second) s = _get_duration s.container s.index format let get_metadata s = List.rev (_get_metadata s.container s.index) type input_result = [ `Audio_packet of int * audio Avcodec.Packet.t | `Audio_frame of int * audio frame | `Video_packet of int * video Avcodec.Packet.t | `Video_frame of int * video frame | `Subtitle_packet of int * subtitle Avcodec.Packet.t | `Subtitle_frame of int * subtitle frame | `Data_packet of int * [ `Data ] Avcodec.Packet.t ] (** Reads the selected streams if any or all streams otherwise. *) external read_input : (int * Avutil.media_type) array -> (int * ('a, Avcodec.decode) Avcodec.codec option) array -> input container -> input_result = "ocaml_av_read_input" let _get_packet media_type input = List.map (fun { index; container; _ } -> if container != input then raise (Failure "Inconsistent stream and input!"); (index, media_type)) let _get_frame input = List.map (fun { index; container; decoder } -> if container != input then raise (Failure "Inconsistent stream and input!"); (index, Obj.magic decoder)) let read_input ?(audio_packet = []) ?(audio_frame = []) ?(video_packet = []) ?(video_frame = []) ?(subtitle_packet = []) ?(subtitle_frame = []) ?(data_packet = []) input = let packet = Array.of_list (_get_packet `Audio input audio_packet @ _get_packet `Video input video_packet @ _get_packet `Subtitle input subtitle_packet @ _get_packet `Data input data_packet) in let frame = Array.of_list (_get_frame input audio_frame @ _get_frame input video_frame @ _get_frame input subtitle_frame) in read_input packet frame input type seek_flag = | Seek_flag_backward | Seek_flag_byte | Seek_flag_any | Seek_flag_frame external seek : flags:seek_flag array -> ?stream:(input, _, _) stream -> ?min_ts:Int64.t -> ?max_ts:Int64.t -> fmt:Time_format.t -> ts:Int64.t -> input container -> unit = "ocaml_av_seek_bytecode" "ocaml_av_seek_native" let seek ?(flags = []) = seek ~flags:(Array.of_list flags) (* Output *) external open_output : ?interrupt:(unit -> bool) -> ?format:(output, _) format -> string -> bool -> (string * string) array -> output container * string array = "ocaml_av_open_output" let open_output ?interrupt ?format ?(interleaved = true) ?opts fname = let opts = opts_default opts in let ret, unused = open_output ?interrupt ?format fname interleaved (mk_opts_array opts) in filter_opts unused opts; Gc.finalise ocaml_av_cleanup_av ret; ret external ocaml_av_open_output_stream : (output, _) format -> avio -> bool -> (string * string) array -> output container * string array = "ocaml_av_open_output_stream" let open_output_stream ?opts ?(interleaved = true) ?seek write format = let opts = opts_default opts in let avio = ocaml_av_create_io 4096 None (Some write) (_seek_of_seek seek) in let cleanup () = caml_av_input_io_finalise avio in let output, unused = ocaml_av_open_output_stream format avio interleaved (mk_opts_array opts) in filter_opts unused opts; Gc.finalise ocaml_av_cleanup_av output; Gc.finalise_last cleanup output; output external output_started : output container -> bool = "ocaml_av_header_written" external _set_metadata : _ container -> int -> (string * string) array -> unit = "ocaml_av_set_metadata" let set_output_metadata o tags = _set_metadata o (-1) (Array.of_list tags) let set_metadata s tags = _set_metadata s.container s.index (Array.of_list tags) let get_output s = s.container type uninitialized_stream_copy = output container * int external new_uninitialized_stream_copy : output container -> int = "ocaml_av_new_uninitialized_stream_copy" let 
new_uninitialized_stream_copy container = (container, new_uninitialized_stream_copy container) external initialize_stream_copy : output container -> int -> _ Avcodec.params -> unit = "ocaml_av_initialize_stream_copy" let initialize_stream_copy ~params (container, index) = initialize_stream_copy container index params; mk_stream container index let new_stream_copy ~params container = initialize_stream_copy ~params (new_uninitialized_stream_copy container) external new_audio_stream : _ container -> int -> [ `Encoder ] Avcodec.Audio.t -> int -> (string * string) array -> int * string array = "ocaml_av_new_audio_stream" let new_audio_stream ?opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base ~codec container = let opts = mk_audio_opts ?opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base () in let channels = match (channels, channel_layout) with | Some n, _ -> n | None, Some layout -> Avutil.Channel_layout.get_nb_channels layout | None, None -> raise (Error (`Failure "At least one of channels or channel_layout must be passed!")) in let ret, unused = new_audio_stream container (Sample_format.get_id sample_format) codec channels (mk_opts_array opts) in filter_opts unused opts; mk_stream container ret external new_video_stream : ?device_context:Avutil.HwContext.device_context -> ?frame_context:Avutil.HwContext.frame_context -> _ container -> [ `Encoder ] Avcodec.Video.t -> (string * string) array -> int * string array = "ocaml_av_new_video_stream" let new_video_stream ?opts ?frame_rate ?hardware_context ~pixel_format ~width ~height ~time_base ~codec container = let opts = mk_video_opts ?opts ?frame_rate ~pixel_format ~width ~height ~time_base () in let device_context, frame_context = match hardware_context with | None -> (None, None) | Some (`Device_context hardware_context) -> (Some hardware_context, None) | Some (`Frame_context frame_context) -> (None, Some frame_context) in let ret, unused = new_video_stream ?device_context ?frame_context container codec (mk_opts_array opts) in filter_opts unused opts; mk_stream container ret external new_subtitle_stream : _ container -> [ `Encoder ] Avcodec.Subtitle.t -> (string * string) array -> int * string array = "ocaml_av_new_subtitle_stream" let new_subtitle_stream ?opts ~time_base ~codec container = let opts = opts_default opts in Hashtbl.add opts "time_base" (`String (Avutil.string_of_rational time_base)); let ret, unused = new_subtitle_stream container codec (mk_opts_array opts) in filter_opts unused opts; mk_stream container ret external new_data_stream : _ container -> Avcodec.Unknown.id -> Avutil.rational -> int = "ocaml_av_new_data_stream" let new_data_stream ~time_base ~codec container = let ret = new_data_stream container codec time_base in mk_stream container ret external codec_attr : _ stream -> string option = "ocaml_av_codec_attr" external bitrate : _ stream -> int option = "ocaml_av_stream_bitrate" external write_packet : (output, 'media, [ `Packet ]) stream -> Avutil.rational -> 'media Avcodec.Packet.t -> unit = "ocaml_av_write_stream_packet" external write_frame : (output, 'media, [ `Frame ]) stream -> 'media frame -> unit = "ocaml_av_write_stream_frame" external flush : output container -> unit = "ocaml_av_flush" external was_keyframe : (output, _, _) stream -> bool = "ocaml_av_was_keyframe" external write_audio_frame : output container -> audio frame -> unit = "ocaml_av_write_audio_frame" external write_video_frame : output container -> video frame -> unit = "ocaml_av_write_video_frame" external 
close : _ container -> unit = "ocaml_av_close" ocaml-ffmpeg-1.1.11/av/av.mli000066400000000000000000000346031457634536500156770ustar00rootroot00000000000000(** This module perform demuxing then decoding for reading and coding then muxing for writing multimedia container formats. *) open Avutil (* A value suitable for listing available options on a given container. *) val container_options : Options.t (** {5 Format} *) module Format : sig (** Return the name of the input format *) val get_input_name : (input, _) format -> string (** Return the long name of the input format *) val get_input_long_name : (input, _) format -> string (** Guess input format based on its short name. *) val find_input_format : string -> (input, 'a) format option (** Return the name of the output format *) val get_output_name : (output, _) format -> string (** Return the long name of the output format *) val get_output_long_name : (output, _) format -> string (** Guess output format based on the passed arguments. *) val guess_output_format : ?short_name:string -> ?filename:string -> ?mime:string -> unit -> (output, 'a) format option (** Return the audio codec id of the output audio format *) val get_audio_codec_id : (output, audio) format -> Avcodec.Audio.id (** Return the video codec id of the output video format *) val get_video_codec_id : (output, video) format -> Avcodec.Video.id (** Return the subtitle codec id of the output subtitle format *) val get_subtitle_codec_id : (output, subtitle) format -> Avcodec.Subtitle.id end (** {5 Input} *) (** [Av.open_input url] open the input [url] (a file name or http URL). After returning, if [opts] was passed, unused options are left in the hash table. Raise Error if the opening failed. *) val open_input : ?interrupt:(unit -> bool) -> ?format:(input, _) format -> ?opts:opts -> string -> input container type read = bytes -> int -> int -> int type write = bytes -> int -> int -> int type seek = int -> Unix.seek_command -> int (** [Av.open_input_stream read] creates an input stream from the given read callback. Exceptions from the callback are caught and result in a native [Avutil.Error `Unknown] error. *) val open_input_stream : ?format:(input, _) format -> ?opts:opts -> ?seek:seek -> read -> input container (** [Av.get_input_duration ~format:fmt input] return the duration of an [input] in the [fmt] time format (in second by default). *) val get_input_duration : ?format:Time_format.t -> input container -> Int64.t option (** Return the input tag (key, vlue) list. *) val get_input_metadata : input container -> (string * string) list (** Return a value of type [obj], suited for use with [Avutils.Options] getters. *) val input_obj : input container -> Options.obj (** Input/output, audio/video/subtitle, mode stream type *) type ('line, 'media, 'mode) stream (** Return the audio stream list of the input. The result is a list of tuple containing the index of the stream in the container, the stream and the codec of the stream. *) val get_audio_streams : input container -> (int * (input, audio, 'a) stream * audio Avcodec.params) list (** Same as {!Av.get_audio_streams} for the video streams. *) val get_video_streams : input container -> (int * (input, video, 'a) stream * video Avcodec.params) list (** Same as {!Av.get_audio_streams} for the subtitle streams. *) val get_subtitle_streams : input container -> (int * (input, subtitle, 'a) stream * subtitle Avcodec.params) list (** Same as {!Av.get_audio_streams} for the data streams. 
*) val get_data_streams : input container -> (int * (input, [ `Data ], 'a) stream * [ `Data ] Avcodec.params) list (** Return the best audio stream of the input. The result is a tuple containing the index of the stream in the container, the stream and the codec of the stream. Raise Error if no stream could be found. *) val find_best_audio_stream : input container -> int * (input, audio, 'a) stream * audio Avcodec.params (** Same as {!Av.find_best_audio_stream} for the video streams. *) val find_best_video_stream : input container -> int * (input, video, 'a) stream * video Avcodec.params (** Same as {!Av.find_best_audio_stream} for the subtitle streams. *) val find_best_subtitle_stream : input container -> int * (input, subtitle, 'a) stream * subtitle Avcodec.params (** Return the input container of the input stream. *) val get_input : (input, _, _) stream -> input container (** Return the index of the stream. *) val get_index : (_, _, _) stream -> int (** [Av.get_codec stream] return the codec of the [stream]. Raise Error if the codec allocation failed. *) val get_codec_params : (_, 'media, _) stream -> 'media Avcodec.params (** [Av.get_time_base stream] return the time base of the [stream]. *) val get_time_base : (_, _, _) stream -> Avutil.rational (** [Av.set_time_base stream time_base] set the [stream] time base to [time_base]. *) val set_time_base : (_, _, _) stream -> Avutil.rational -> unit (** [Av.get_frame_size stream] return the frame size for the given audio stream. *) val get_frame_size : (output, audio, _) stream -> int (** [Av.get_pixel_aspect stream] return the pixel aspect of the [stream]. *) val get_pixel_aspect : (_, video, _) stream -> Avutil.rational option (** Same as {!Av.get_input_duration} for the input streams. *) val get_duration : ?format:Time_format.t -> (input, _, _) stream -> Int64.t (** Same as {!Av.get_input_metadata} for the input streams. *) val get_metadata : (input, _, _) stream -> (string * string) list (** For the use of a specific decoder for the given input stream. *) val set_decoder : (input, 'a, _) stream -> ('a, Avcodec.decode) Avcodec.codec -> unit type input_result = [ `Audio_packet of int * audio Avcodec.Packet.t | `Audio_frame of int * audio frame | `Video_packet of int * video Avcodec.Packet.t | `Video_frame of int * video frame | `Subtitle_packet of int * subtitle Avcodec.Packet.t | `Subtitle_frame of int * subtitle frame | `Data_packet of int * [ `Data ] Avcodec.Packet.t ] (** Reads the selected streams if any or all streams otherwise. Return the next [Audio] [Video] [Subtitle] of [Data] index and packet or frame of the input or [Error `Eof] if the end of the input is reached. Raise Error if the reading failed. Only packet and frames from the specified streams are returned. *) val read_input : ?audio_packet:(input, audio, [ `Packet ]) stream list -> ?audio_frame:(input, audio, [ `Frame ]) stream list -> ?video_packet:(input, video, [ `Packet ]) stream list -> ?video_frame:(input, video, [ `Frame ]) stream list -> ?subtitle_packet:(input, subtitle, [ `Packet ]) stream list -> ?subtitle_frame:(input, subtitle, [ `Frame ]) stream list -> ?data_packet:(input, [ `Data ], [ `Packet ]) stream list -> input container -> input_result (** Seek mode. *) type seek_flag = | Seek_flag_backward | Seek_flag_byte | Seek_flag_any | Seek_flag_frame (** [Av.seek ?flags ?stream ?min_ts ?max_ts ~fmt ~ts container] seek in the container [container] to position [ts]. 
You can pass an optional [stream] to use for seeking, [max_ts] and [min_ts] to force seeking to happen within a given timestamp window and [flags] to speficy certain property of the seeking operation. Raise Error if the seeking failed. *) val seek : ?flags:seek_flag list -> ?stream:(input, _, _) stream -> ?min_ts:Int64.t -> ?max_ts:Int64.t -> fmt:Time_format.t -> ts:Int64.t -> input container -> unit (** {5 Output} *) (** [Av.open_output ?interrupt ?format ?interleaved ?opts filename] open the output file named [filename]. [interrupt] is used to interrupt blocking functions, [format] may contain an optional format, [interleaved] indicates if FFmpeg's interleaved API should be used, [opts] may contain any option settable on the stream's internal AVFormat. After returning, if [opts] was passed, unused options are left in the hash table. Raise Error if the opening failed. *) val open_output : ?interrupt:(unit -> bool) -> ?format:(output, _) format -> ?interleaved:bool -> ?opts:opts -> string -> output container (** [Av.open_stream callbacks] open the output container with the given callbacks. [opts] may contain any option settable on Ffmpeg avformat. After returning, if [opts] was passed, unused options are left in the hash table. Raise Error if the opening failed. Exceptions from the callback are caught and result in a native [Avutil.Error `Unknown] error. *) val open_output_stream : ?opts:opts -> ?interleaved:bool -> ?seek:seek -> write -> (output, _) format -> output container (** Returns [true] if the output has already started, in which case no new * stream or metadata can be added. *) val output_started : output container -> bool (** [Av.set_output_metadata dst tags] set the metadata of the [dst] output with the [tags] tag list. This must be set before starting writing streams. Raise Error if a writing already taken place or if the setting failed. *) val set_output_metadata : output container -> (string * string) list -> unit (** Same as {!Av.set_output_metadata} for the output streams. *) val set_metadata : (_, _, _) stream -> (string * string) list -> unit (** Return the output container of the output stream. *) val get_output : (output, _, _) stream -> output container (* Create a new stream that only supports packet input and does not do any encoding. Used for remuxing with encoded data. *) val new_stream_copy : params:'mode Avcodec.params -> output container -> (output, 'mode, [ `Packet ]) stream (* Asynchronous stream copy creation *) type uninitialized_stream_copy (* Create a new uninitialized stream copy. This can be used to reserve a stream index in the output container while waiting for its actual parameters. *) val new_uninitialized_stream_copy : output container -> uninitialized_stream_copy (* Initialize a stream copy. *) val initialize_stream_copy : params:'mode Avcodec.params -> uninitialized_stream_copy -> (output, 'mode, [ `Packet ]) stream (** Add a new audio stream to the given container. Stream only supports frames and encodes its input. [opts] may contain any option settable on the stream's internal AVCodec. After returning, if [opts] was passed, unused options are left in the hash table. At least one of [channels] or [channel_layout] must be passed. Frames passed to this stream for encoding must have a PTS set according to the given [time_base]. [1/sample_rate] is usually a good value for the [time_base]. Please note that some codec require a fixed frame size, denoted by the absence of the [`Variable_frame_size] codec capabilities. 
*) val new_audio_stream : ?opts:opts -> ?channels:int -> ?channel_layout:Channel_layout.t -> sample_rate:int -> sample_format:Avutil.Sample_format.t -> time_base:Avutil.rational -> codec:[ `Encoder ] Avcodec.Audio.t -> output container -> (output, audio, [ `Frame ]) stream (** Add a new video stream to the given container. Stream only supports frames and encodes its input. [opts] may contain any option settable on the stream's internal AVCodec. After returning, if [opts] was passed, unused options are left in the hash table. Frames passed to this stream for encoding must have a PTS set according to the given [time_base]. [1/frame_rate] is usually a good value for the [time_base]. [hardware_context] can be used to pass optional hardware device and frame contexts to enable hardware encoding on this stream. Raise Error if the opening failed. *) val new_video_stream : ?opts:opts -> ?frame_rate:Avutil.rational -> ?hardware_context:Avcodec.Video.hardware_context -> pixel_format:Avutil.Pixel_format.t -> width:int -> height:int -> time_base:Avutil.rational -> codec:[ `Encoder ] Avcodec.Video.t -> output container -> (output, video, [ `Frame ]) stream (** Add a new subtitle stream to the given container. Stream only supports frames and encodes its input. [opts] may contain any option settable on the stream's internal AVCodec. After returning, if [opts] was passed, unused options are left in the hash table. Raise Error if the opening failed. *) val new_subtitle_stream : ?opts:opts -> time_base:Avutil.rational -> codec:[ `Encoder ] Avcodec.Subtitle.t -> output container -> (output, subtitle, [ `Frame ]) stream (** Add a new data stream to the given container. [opts] may contain any option settable on the stream's internal AVCodec. After returning, if [opts] was passed, unused options are left in the hash table. Raise Error if the opening failed. *) val new_data_stream : time_base:Avutil.rational -> codec:Avcodec.Unknown.id -> output container -> (output, [ `Data ], [ `Packet ]) stream (** Return a codec attribute suitable for HLS playlists when available. *) val codec_attr : _ stream -> string option (** Return the stream's bitrate when available, suitable for HLS playlists. *) val bitrate : _ stream -> int option (** [Av.write_packet os time_base pkt] write the [pkt] packet to the [os] output stream. [time_base] is the packet's PTS/DTS/duration time base. Raise Error if the writing failed. *) val write_packet : (output, 'media, [ `Packet ]) stream -> Avutil.rational -> 'media Avcodec.Packet.t -> unit (** [Av.write_frame os frm] write the [frm] frame to the [os] output stream. Frame PTS should be set and counted in units of [time_base], as passed when creating the stream. Raise Error if the writing failed. *) val write_frame : (output, 'media, [ `Frame ]) stream -> 'media frame -> unit (** [true] if the last processed frame was a video key frame. *) val was_keyframe : (output, _, _) stream -> bool (** [Av.write_audio_frame dst frm] write the [frm] audio frame to the [dst] output audio container. Raise Error if the output format is not defined, if the output media type is not compatible with the frame, or if the writing failed. *) val write_audio_frame : output container -> audio frame -> unit (** Same as {!Av.write_audio_frame} for output video container.
*) val write_video_frame : output container -> video frame -> unit (** Flush the underlying muxer. *) val flush : output container -> unit (** Close an input or output container. *) val close : _ container -> unit ocaml-ffmpeg-1.1.11/av/av_stubs.c000066400000000000000000002030061457634536500165530ustar00rootroot00000000000000#include #define CAML_NAME_SPACE 1 #include #include #include #include #include #include #include #include #include #ifndef Bytes_val #define Bytes_val String_val #endif #include #include #include #include #include #include #include #include "av_stubs.h" #include "avcodec_stubs.h" #include "avutil_stubs.h" /**** Init ****/ value ocaml_av_init(value unit) { #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 9, 100) av_register_all(); #endif avformat_network_init(); return Val_unit; } /**** Context ****/ typedef struct { int index; AVCodecContext *codec_context; // input int got_frame; // output int was_keyframe; } stream_t; typedef struct av_t { AVFormatContext *format_context; stream_t **streams; value control_message_callback; int is_input; value interrupt_cb; int closed; // input int end_of_file; int frames_pending; stream_t *best_audio_stream; stream_t *best_video_stream; stream_t *best_subtitle_stream; // output int header_written; int (*write_frame)(AVFormatContext *, AVPacket *); int custom_io; } av_t; #define Av_base_val(v) (*(av_t **)Data_custom_val(v)) static inline av_t *Av_val(value v) { av_t *av = Av_base_val(v); if (av->closed) Fail("Container closed!"); return av; } /***** Stream handler *****/ #define StreamIndex_val(v) Int_val(Field(v, 1)) /**** Media Type ****/ static const enum AVMediaType MEDIA_TYPES[] = { AVMEDIA_TYPE_AUDIO, AVMEDIA_TYPE_VIDEO, AVMEDIA_TYPE_DATA, AVMEDIA_TYPE_SUBTITLE}; #define MEDIA_TYPES_LEN (sizeof(MEDIA_TYPES) / sizeof(enum AVMediaType)) enum AVMediaType MediaType_val(value v) { return MEDIA_TYPES[Int_val(v)]; } static void free_stream(stream_t *stream) { if (!stream) return; if (stream->codec_context) avcodec_free_context(&stream->codec_context); free(stream); } static void close_av(av_t *av) { if (av->closed) return; caml_release_runtime_system(); if (av->format_context) { if (av->streams) { unsigned int i; for (i = 0; i < av->format_context->nb_streams; i++) { if (av->streams[i]) free_stream(av->streams[i]); } free(av->streams); av->streams = NULL; } if (av->format_context->iformat) { avformat_close_input(&av->format_context); } else if (av->format_context->oformat) { // Close the output file if needed. 
if (!av->custom_io && !(av->format_context->oformat->flags & AVFMT_NOFILE)) avio_closep(&av->format_context->pb); avformat_free_context(av->format_context); av->format_context = NULL; } av->best_audio_stream = NULL; av->best_video_stream = NULL; av->best_subtitle_stream = NULL; } caml_acquire_runtime_system(); if (av->control_message_callback) { caml_remove_generational_global_root(&av->control_message_callback); } if (av->interrupt_cb != Val_none) { caml_remove_generational_global_root(&av->interrupt_cb); av->interrupt_cb = Val_none; } av->closed = 1; } static void finalize_av(value v) { free(Av_base_val(v)); } static struct custom_operations av_ops = { "ocaml_av_context", finalize_av, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; AVFormatContext *ocaml_av_get_format_context(value *p_av) { return Av_val(*p_av)->format_context; } CAMLprim value ocaml_av_container_options(value unit) { CAMLparam0(); CAMLlocal1(ret); CAMLreturn(value_of_avclass(ret, avformat_get_class())); } CAMLprim value ocaml_av_get_streams(value _av, value _media_type) { CAMLparam2(_av, _media_type); CAMLlocal2(list, cons); av_t *av = Av_val(_av); enum AVMediaType type = MediaType_val(_media_type); unsigned int i; List_init(list); for (i = 0; i < av->format_context->nb_streams; i++) { if (av->format_context->streams[i]->codecpar->codec_type == type) List_add(list, cons, Val_int(i)); } CAMLreturn(list); } CAMLprim value ocaml_av_get_stream_codec_parameters(value _stream) { CAMLparam1(_stream); CAMLlocal2(ans, _av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); value_of_codec_parameters_copy(av->format_context->streams[index]->codecpar, &ans); CAMLreturn(ans); } CAMLprim value ocaml_av_get_stream_time_base(value _stream) { CAMLparam1(_stream); CAMLlocal2(ans, _av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); value_of_rational(&av->format_context->streams[index]->time_base, &ans); CAMLreturn(ans); } CAMLprim value ocaml_av_set_stream_time_base(value _stream, value _time_base) { CAMLparam2(_stream, _time_base); CAMLlocal1(_av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); av->format_context->streams[index]->time_base = rational_of_value(_time_base); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_get_stream_frame_size(value _stream) { CAMLparam1(_stream); CAMLlocal1(_av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); CAMLreturn(Val_int(av->streams[index]->codec_context->frame_size)); } CAMLprim value ocaml_av_get_stream_pixel_aspect(value _stream) { CAMLparam1(_stream); CAMLlocal3(ans, ret, _av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); const AVRational pixel_aspect = av->format_context->streams[index]->sample_aspect_ratio; if (pixel_aspect.num == 0) CAMLreturn(Val_none); value_of_rational(&pixel_aspect, &ans); ret = caml_alloc_tuple(1); Store_field(ret, 0, ans); CAMLreturn(ret); } value *ocaml_av_get_control_message_callback(struct AVFormatContext *ctx) { return &((av_t *)ctx->opaque)->control_message_callback; } void ocaml_av_set_control_message_callback(value *p_av, av_format_control_message c_callback, value *p_ocaml_callback) { av_t *av = Av_val(*p_av); if (!av->control_message_callback) { av->control_message_callback = *p_ocaml_callback; caml_register_generational_global_root(&av->control_message_callback); } else { 
caml_modify_generational_global_root(&av->control_message_callback, *p_ocaml_callback); } av->format_context->opaque = (void *)av; av->format_context->control_message_cb = c_callback; } /***** Input *****/ /***** AVIO *****/ typedef struct avio_t { AVFormatContext *format_context; AVIOContext *avio_context; value read_cb; value write_cb; value seek_cb; } avio_t; #define Avio_val(v) (*(avio_t **)Data_abstract_val(v)) static int ocaml_avio_read_callback(void *private, uint8_t *buf, int buf_size) { value buffer, res; avio_t *avio = (avio_t *)private; int ret; size_t exn_len; char *caml_exn = NULL; char *c_exn = NULL; ret = caml_c_thread_register(); caml_acquire_runtime_system(); buffer = caml_alloc_string(buf_size); caml_register_generational_global_root(&buffer); res = caml_callback3_exn(avio->read_cb, buffer, Val_int(0), Val_int(buf_size)); if (Is_exception_result(res)) { res = Extract_exception(res); caml_exn = caml_format_exception(res); if (caml_exn) { exn_len = strlen(caml_exn) + 1; c_exn = malloc(exn_len); if (!c_exn) caml_raise_out_of_memory(); memcpy(c_exn, caml_exn, exn_len); caml_stat_free(caml_exn); } if (c_exn) { av_log(avio->avio_context, AV_LOG_ERROR, "Error while executing OCaml read callback: %s\n", c_exn); free(c_exn); } caml_remove_generational_global_root(&buffer); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return AVERROR_EXTERNAL; } if (Int_val(res) < 0) { caml_remove_generational_global_root(&buffer); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return Int_val(res); } memcpy(buf, String_val(buffer), Int_val(res)); caml_remove_generational_global_root(&buffer); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } if (Int_val(res) == 0) { return AVERROR_EOF; } return Int_val(res); } static int ocaml_avio_write_callback(void *private, uint8_t *buf, int buf_size) { value buffer, res; avio_t *avio = (avio_t *)private; int ret; size_t exn_len; char *caml_exn = NULL; char *c_exn = NULL; ret = caml_c_thread_register(); caml_acquire_runtime_system(); buffer = caml_alloc_string(buf_size); caml_register_generational_global_root(&buffer); memcpy(Bytes_val(buffer), buf, buf_size); res = caml_callback3_exn(avio->write_cb, buffer, Val_int(0), Val_int(buf_size)); if (Is_exception_result(res)) { res = Extract_exception(res); caml_exn = caml_format_exception(res); if (caml_exn) { exn_len = strlen(caml_exn) + 1; c_exn = malloc(exn_len); if (!c_exn) caml_raise_out_of_memory(); memcpy(c_exn, caml_exn, exn_len); caml_stat_free(caml_exn); } if (c_exn) { av_log(avio->avio_context, AV_LOG_ERROR, "Error while executing OCaml write callback: %s\n", c_exn); free(c_exn); } caml_remove_generational_global_root(&buffer); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return AVERROR_EXTERNAL; } caml_remove_generational_global_root(&buffer); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return Int_val(res); } static int64_t ocaml_avio_seek_callback(void *private, int64_t offset, int whence) { value res; avio_t *avio = (avio_t *)private; int _whence, ret; int64_t n; switch (whence) { case SEEK_SET: _whence = 0; break; case SEEK_CUR: _whence = 1; break; case SEEK_END: _whence = 2; break; default: return -1; } ret = caml_c_thread_register(); caml_acquire_runtime_system(); res = caml_callback2(avio->seek_cb, Val_int(offset), Val_int(_whence)); n = Int_val(res); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return n; } static int 
ocaml_av_interrupt_callback(void *private) { value res; av_t *av = (av_t *)private; int ret, n; ret = caml_c_thread_register(); caml_acquire_runtime_system(); res = caml_callback(av->interrupt_cb, Val_unit); n = Int_val(res); caml_release_runtime_system(); if (ret != 0) { caml_c_thread_unregister(); } return n; }; CAMLprim value ocaml_av_create_io(value bufsize, value _read_cb, value _write_cb, value _seek_cb) { CAMLparam3(_read_cb, _write_cb, _seek_cb); CAMLlocal1(ret); int (*read_cb)(void *opaque, uint8_t *buf, int buf_size) = NULL; int (*write_cb)(void *opaque, uint8_t *buf, int buf_size) = NULL; int64_t (*seek_cb)(void *opaque, int64_t offset, int whence) = NULL; int write_flag = 0; unsigned char *buffer; int buffer_size; avio_t *avio = (avio_t *)calloc(1, sizeof(avio_t)); if (!avio) caml_raise_out_of_memory(); avio->read_cb = (value)NULL; avio->write_cb = (value)NULL; avio->seek_cb = (value)NULL; avio->format_context = avformat_alloc_context(); if (!avio->format_context) { free(avio); caml_raise_out_of_memory(); } buffer_size = Int_val(bufsize); buffer = av_malloc(buffer_size); if (!buffer) { avformat_free_context(avio->format_context); free(avio); caml_raise_out_of_memory(); } if (_read_cb != Val_none) { avio->read_cb = Some_val(_read_cb); caml_register_generational_global_root(&avio->read_cb); read_cb = ocaml_avio_read_callback; } if (_write_cb != Val_none) { avio->write_cb = Some_val(_write_cb); caml_register_generational_global_root(&avio->write_cb); write_cb = ocaml_avio_write_callback; write_flag = 1; } if (_seek_cb != Val_none) { avio->seek_cb = Some_val(_seek_cb); caml_register_generational_global_root(&avio->seek_cb); seek_cb = ocaml_avio_seek_callback; } avio->avio_context = avio_alloc_context(buffer, buffer_size, write_flag, (void *)avio, read_cb, write_cb, seek_cb); if (!avio->avio_context) { if (avio->read_cb) caml_remove_generational_global_root(&avio->read_cb); if (avio->write_cb) caml_remove_generational_global_root(&avio->write_cb); if (avio->seek_cb) caml_remove_generational_global_root(&avio->seek_cb); av_freep(buffer); avformat_free_context(avio->format_context); free(avio); caml_raise_out_of_memory(); } avio->format_context->pb = avio->avio_context; ret = caml_alloc(1, Abstract_tag); Avio_val(ret) = avio; CAMLreturn(ret); } CAMLprim value caml_av_input_io_finalise(value _avio) { CAMLparam1(_avio); avio_t *avio = Avio_val(_avio); // format_context is freed as part of close_av. 
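/* Note: libavformat may internally replace the buffer originally passed to
   avio_alloc_context(), so the buffer currently referenced by
   avio_context->buffer is freed here rather than the pointer we allocated. */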
av_free(avio->avio_context->buffer); avio_context_free(&avio->avio_context); if (avio->read_cb) caml_remove_generational_global_root(&avio->read_cb); if (avio->write_cb) caml_remove_generational_global_root(&avio->write_cb); if (avio->seek_cb) caml_remove_generational_global_root(&avio->seek_cb); free(avio); CAMLreturn(Val_unit); } /***** AVInputFormat *****/ void value_of_inputFormat(avioformat_const AVInputFormat *inputFormat, value *p_value) { if (!inputFormat) Fail("Empty input format"); *p_value = caml_alloc(1, Abstract_tag); InputFormat_val((*p_value)) = inputFormat; } CAMLprim value ocaml_av_find_input_format(value _short_name) { CAMLparam1(_short_name); CAMLlocal1(ret); char *short_name = strndup(String_val(_short_name), caml_string_length(_short_name)); if (!short_name) caml_raise_out_of_memory(); caml_release_runtime_system(); avioformat_const AVInputFormat *format = av_find_input_format(short_name); caml_acquire_runtime_system(); free(short_name); if (!format) caml_raise_not_found(); value_of_inputFormat(format, &ret); CAMLreturn(ret); } CAMLprim value ocaml_av_input_format_get_name(value _format) { CAMLparam1(_format); const char *n = InputFormat_val(_format)->name; CAMLreturn(caml_copy_string(n ? n : "")); } CAMLprim value ocaml_av_input_format_get_long_name(value _format) { CAMLparam1(_format); const char *n = InputFormat_val(_format)->long_name; CAMLreturn(caml_copy_string(n ? n : "")); } static av_t *open_input(char *url, avioformat_const AVInputFormat *format, AVFormatContext *format_context, value _interrupt, AVDictionary **options) { int err; av_t *av = (av_t *)calloc(1, sizeof(av_t)); if (!av) { if (url) free(url); caml_raise_out_of_memory(); } if (format_context) { av->format_context = format_context; } else { av->format_context = avformat_alloc_context(); } if (!av->format_context) { if (url) free(url); free(av); caml_raise_out_of_memory(); } av->closed = 0; av->is_input = 1; av->frames_pending = 0; av->streams = NULL; if (_interrupt != Val_none) { av->interrupt_cb = Some_val(_interrupt); caml_register_generational_global_root(&av->interrupt_cb); av->format_context->interrupt_callback.callback = ocaml_av_interrupt_callback; av->format_context->interrupt_callback.opaque = (void *)av; } else { av->interrupt_cb = Val_none; } caml_release_runtime_system(); err = avformat_open_input(&av->format_context, url, format, options); caml_acquire_runtime_system(); if (err < 0) { if (av->interrupt_cb != Val_none) { caml_remove_generational_global_root(&av->interrupt_cb); av->interrupt_cb = Val_none; } free(av); if (url) free(url); av_dict_free(options); ocaml_avutil_raise_error(err); } // retrieve stream information caml_release_runtime_system(); err = avformat_find_stream_info(av->format_context, NULL); caml_acquire_runtime_system(); if (err < 0) { avformat_free_context(av->format_context); if (av->interrupt_cb != Val_none) { caml_remove_generational_global_root(&av->interrupt_cb); av->interrupt_cb = Val_none; } free(av); if (url) free(url); av_dict_free(options); ocaml_avutil_raise_error(err); } return av; } CAMLprim value ocaml_av_open_input(value _url, value _format, value _interrupt, value _opts) { CAMLparam4(_url, _format, _interrupt, _opts); CAMLlocal3(ret, ans, unused); char *url = NULL; avioformat_const AVInputFormat *format = NULL; int ulen = caml_string_length(_url); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } if (ulen > 0) url = strndup(String_val(_url), ulen); if (_format != Val_none) format = InputFormat_val(Some_val(_format)); if (format == NULL && url == NULL) { av_dict_free(&options); Fail("At least one format or url must be provided!"); } // open input url av_t *av = open_input(url, format, NULL, _interrupt, &options); if (url) free(url); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); // allocate format context ans = caml_alloc_custom(&av_ops, sizeof(av_t *), 0, 1); Av_base_val(ans) = av; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_av_open_input_stream(value _avio, value _format, value _opts) { CAMLparam3(_avio, _format, _opts); CAMLlocal3(ret, ans, unused); avio_t *avio = Avio_val(_avio); avioformat_const AVInputFormat *format = NULL; AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } if (_format != Val_none) format = InputFormat_val(Some_val(_format)); // open input format av_t *av = open_input(NULL, format, avio->format_context, Val_none, &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); // allocate format context ans = caml_alloc_custom(&av_ops, sizeof(av_t *), 0, 1); Av_base_val(ans) = av; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_av_input_obj(value _av) { CAMLparam1(_av); CAMLlocal1(ret); CAMLreturn(value_of_avobj(ret, Av_val(_av)->format_context)); } CAMLprim value ocaml_av_get_metadata(value _av, value _stream_index) { CAMLparam1(_av); CAMLlocal3(pair, cons, list); av_t *av = Av_val(_av); int index = Int_val(_stream_index); AVDictionary *metadata = av->format_context->metadata; AVDictionaryEntry *tag = NULL; if (index >= 0) { metadata = av->format_context->streams[index]->metadata; } List_init(list); while ((tag = av_dict_get(metadata, "", tag, AV_DICT_IGNORE_SUFFIX))) { pair = caml_alloc_tuple(2); Store_field(pair, 0, caml_copy_string(tag->key)); Store_field(pair, 1, caml_copy_string(tag->value)); List_add(list, cons, pair); } CAMLreturn(list); } CAMLprim value ocaml_av_get_duration(value _av, value _stream_index, value _time_format) { CAMLparam2(_av, _time_format); CAMLlocal1(ans); av_t *av = Av_val(_av); int index = Int_val(_stream_index); if (!av->format_context) Fail("Failed to get closed input duration"); int64_t duration = av->format_context->duration; int64_t num = 1; int64_t den = AV_TIME_BASE; if (index >= 0) { duration = av->format_context->streams[index]->duration; num = 
(int64_t)av->format_context->streams[index]->time_base.num; den = (int64_t)av->format_context->streams[index]->time_base.den; } int64_t second_fractions = second_fractions_of_time_format(_time_format); ans = caml_copy_int64((duration * second_fractions * num) / den); CAMLreturn(ans); } static stream_t **allocate_input_context(av_t *av) { if (!av->format_context) Fail("Failed to read closed input"); // Allocate streams context array av->streams = (stream_t **)calloc(av->format_context->nb_streams, sizeof(stream_t *)); if (!av->streams) caml_raise_out_of_memory(); return av->streams; } static stream_t *allocate_stream_context(av_t *av, int index, const AVCodec *codec) { if (codec) { enum AVMediaType type = codec->type; if (type != AVMEDIA_TYPE_AUDIO && type != AVMEDIA_TYPE_VIDEO && type != AVMEDIA_TYPE_SUBTITLE) Fail("Failed to allocate stream %d of media type %s", index, av_get_media_type_string(type)); } stream_t *stream = (stream_t *)calloc(1, sizeof(stream_t)); if (!stream) caml_raise_out_of_memory(); stream->index = index; av->streams[index] = stream; if (!codec) return stream; stream->codec_context = avcodec_alloc_context3(codec); if (!stream->codec_context) { caml_raise_out_of_memory(); } return stream; } static stream_t *open_stream_index(av_t *av, int index, const AVCodec *dec) { int err; if (!av->format_context) Fail("Failed to open stream %d of closed input", index); if (index < 0 || index >= av->format_context->nb_streams) Fail("Failed to open stream %d : index out of bounds", index); if (!av->streams && !allocate_input_context(av)) caml_raise_out_of_memory(); // find decoder for the stream AVCodecParameters *dec_param = av->format_context->streams[index]->codecpar; if (!dec) { caml_release_runtime_system(); dec = avcodec_find_decoder(dec_param->codec_id); caml_acquire_runtime_system(); } if (!dec) ocaml_avutil_raise_error(AVERROR_DECODER_NOT_FOUND); stream_t *stream = allocate_stream_context(av, index, dec); if (!stream) caml_raise_out_of_memory(); // initialize the stream parameters with demuxer information err = avcodec_parameters_to_context(stream->codec_context, dec_param); if (err < 0) { free(stream); ocaml_avutil_raise_error(err); } // Open the decoder caml_release_runtime_system(); err = avcodec_open2(stream->codec_context, dec, NULL); caml_acquire_runtime_system(); if (err < 0) { free(stream); ocaml_avutil_raise_error(err); } return stream; } CAMLprim value ocaml_av_find_best_stream(value _av, value _media_type) { CAMLparam2(_av, _media_type); av_t *av = Av_val(_av); enum AVMediaType type = MediaType_val(_media_type); caml_release_runtime_system(); int index = av_find_best_stream(av->format_context, type, -1, -1, NULL, 0); caml_acquire_runtime_system(); if (index < 0) ocaml_avutil_raise_error(AVERROR_STREAM_NOT_FOUND); CAMLreturn(Val_int(index)); } static int decode_packet(av_t *av, stream_t *stream, AVPacket *packet, AVFrame *frame) { AVCodecContext *dec = stream->codec_context; int ret = 0; caml_release_runtime_system(); if (dec->codec_type == AVMEDIA_TYPE_AUDIO || dec->codec_type == AVMEDIA_TYPE_VIDEO) { // Assumption: each time this function is called with `frames_pending == 0`, // a fresh packet is also provided and no packet otherwise. 
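/* This implements FFmpeg's send/receive decoding pattern: a packet is
   submitted with avcodec_send_packet(), then decoded frames are drained with
   avcodec_receive_frame() until it returns AVERROR(EAGAIN), at which point
   `frames_pending` is cleared and a fresh packet is expected. */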
if (!av->frames_pending) { ret = avcodec_send_packet(dec, packet); if (ret < 0) { caml_acquire_runtime_system(); return ret; } av_packet_unref(packet); av->frames_pending = 1; } // decode frame ret = avcodec_receive_frame(dec, frame); if (ret == AVERROR(EAGAIN)) av->frames_pending = 0; } else if (dec->codec_type == AVMEDIA_TYPE_SUBTITLE) { ret = avcodec_decode_subtitle2(dec, (AVSubtitle *)frame, &stream->got_frame, packet); if (ret >= 0) { av_packet_unref(packet); caml_acquire_runtime_system(); return ret; } } av_packet_unref(packet); caml_acquire_runtime_system(); stream->got_frame = 1; return ret; } static int read_packet(av_t *av, AVPacket *packet) { int ret; caml_release_runtime_system(); ret = av_read_frame(av->format_context, packet); if (ret == AVERROR_EOF) { packet->data = NULL; packet->size = 0; av->end_of_file = 1; caml_acquire_runtime_system(); return 0; } caml_acquire_runtime_system(); return ret; } CAMLprim value ocaml_av_read_input(value _packet, value _frame, value _av) { CAMLparam3(_packet, _frame, _av); CAMLlocal3(ans, decoded_content, frame_value); av_t *av = Av_val(_av); AVFrame *frame; int i, ret, err, frame_kind, skip; value _dec; const AVCodec *dec = NULL; if (!av->streams && !allocate_input_context(av)) caml_raise_out_of_memory(); AVPacket *packet = av_packet_alloc(); if (!packet) { caml_raise_out_of_memory(); } packet->data = NULL; packet->size = 0; stream_t **streams = av->streams; unsigned int nb_streams = av->format_context->nb_streams; stream_t *stream = NULL; do { if (!av->end_of_file && !av->frames_pending) { // Don't use ret here as it is the conditional for the // loop. err = read_packet(av, packet); if (err < 0) { av_packet_free(&packet); ocaml_avutil_raise_error(err); } if (av->end_of_file) continue; skip = 1; for (i = 0; i < Wosize_val(_packet); i++) if (Int_val(Field(Field(_packet, i), 0)) == packet->stream_index) { skip = 0; } for (i = 0; i < Wosize_val(_frame); i++) if (Int_val(Field(Field(_frame, i), 0)) == packet->stream_index) { _dec = Field(Field(_frame, i), 1); if (_dec != Val_none) { dec = AvCodec_val(Some_val(_dec)); } if ((stream = streams[packet->stream_index]) == NULL) stream = open_stream_index(av, packet->stream_index, dec); skip = 0; } if (skip) { av_packet_unref(packet); continue; } for (i = 0; i < Wosize_val(_packet); i++) { if (Int_val(Field(Field(_packet, i), 0)) == packet->stream_index) { decoded_content = caml_alloc_tuple(2); Store_field(decoded_content, 0, Val_int(packet->stream_index)); Store_field(decoded_content, 1, value_of_ffmpeg_packet(packet)); ans = caml_alloc_tuple(2); switch (Field(Field(_packet, i), 1)) { case PVV_Audio: Store_field(ans, 0, PVV_Audio_packet); break; case PVV_Video: Store_field(ans, 0, PVV_Video_packet); break; case PVV_Data: Store_field(ans, 0, PVV_Data_packet); break; default: Store_field(ans, 0, PVV_Subtitle_packet); break; } Store_field(ans, 1, decoded_content); CAMLreturn(ans); } } } else { for (i = 0; i < nb_streams; i++) { if ((stream = streams[i]) && stream->got_frame) break; } if (i == nb_streams) ocaml_avutil_raise_error(AVERROR_EOF); } skip = 1; for (i = 0; i < Wosize_val(_frame); i++) { if (Int_val(Field(Field(_frame, i), 0)) == stream->index) { skip = 0; } } if (skip) { av_packet_unref(packet); continue; } // Assign OCaml values right away to account for potential exceptions // raised below. 
if (stream->codec_context->codec_type == AVMEDIA_TYPE_SUBTITLE) { frame = (AVFrame *)calloc(1, sizeof(AVSubtitle)); if (!frame) { av_packet_free(&packet); caml_raise_out_of_memory(); } frame_kind = PVV_Subtitle_frame; frame_value = value_of_subtitle((AVSubtitle *)frame); } else { frame = av_frame_alloc(); if (!frame) { av_packet_free(&packet); caml_raise_out_of_memory(); } if (stream->codec_context->codec_type == AVMEDIA_TYPE_AUDIO) frame_kind = PVV_Audio_frame; else frame_kind = PVV_Video_frame; frame_value = value_of_frame(frame); } ret = decode_packet(av, stream, packet, frame); if (ret < 0 && ret != AVERROR(EAGAIN)) { av_packet_free(&packet); ocaml_avutil_raise_error(ret); } av_packet_unref(packet); } while (ret == AVERROR(EAGAIN)); av_packet_free(&packet); decoded_content = caml_alloc_tuple(2); Store_field(decoded_content, 0, Val_int(stream->index)); Store_field(decoded_content, 1, frame_value); ans = caml_alloc_tuple(2); Store_field(ans, 0, frame_kind); Store_field(ans, 1, decoded_content); CAMLreturn(ans); } static const int seek_flags[] = {AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE, AVSEEK_FLAG_ANY, AVSEEK_FLAG_FRAME}; static int seek_flags_val(value v) { return seek_flags[Int_val(v)]; } CAMLprim value ocaml_av_seek_native(value _flags, value _stream, value _min_ts, value _max_ts, value _time_format, value _timestamp, value _av) { CAMLparam5(_flags, _stream, _min_ts, _max_ts, _time_format); CAMLxparam2(_timestamp, _av); av_t *av = Av_val(_av); int index = -1; int64_t min_ts = INT64_MIN; int64_t max_ts = INT64_MAX; int64_t timestamp = Int64_val(_timestamp); int64_t second_fractions = second_fractions_of_time_format(_time_format); int64_t num = AV_TIME_BASE; int64_t den = 1; int flags = 0; int ret, i; if (!av->format_context) Fail("Failed to seek closed input"); if (_stream != Val_none) { index = StreamIndex_val(Field(_stream, 0)); } if (index >= 0) { num = (int64_t)av->format_context->streams[index]->time_base.den; den = (int64_t)av->format_context->streams[index]->time_base.num; } timestamp = (timestamp * num) / (den * second_fractions); if (_min_ts != Val_none) { min_ts = (Int64_val(Field(_min_ts, 0)) * num) / (den * second_fractions); } if (_max_ts != Val_none) { max_ts = (Int64_val(Field(_max_ts, 0)) * num) / (den * second_fractions); } for (i = 0; i < Wosize_val(_flags); i++) flags |= seek_flags_val(Field(_flags, i)); caml_release_runtime_system(); ret = avformat_seek_file(av->format_context, index, min_ts, timestamp, max_ts, flags); caml_acquire_runtime_system(); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_seek_bytecode(value *argv, int argn) { return ocaml_av_seek_native(argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]); } /***** Output *****/ /***** AVOutputFormat *****/ value value_of_outputFormat(avioformat_const AVOutputFormat *outputFormat) { value v; if (!outputFormat) Fail("Empty output format"); v = caml_alloc(1, Abstract_tag); OutputFormat_val(v) = outputFormat; return v; } CAMLprim value ocaml_av_output_format_guess(value _short_name, value _filename, value _mime) { CAMLparam3(_short_name, _filename, _mime); CAMLlocal1(ans); char *short_name = NULL; char *filename = NULL; char *mime = NULL; avioformat_const AVOutputFormat *guessed; if (caml_string_length(_short_name) > 0) { short_name = strndup(String_val(_short_name), caml_string_length(_short_name)); if (!short_name) caml_raise_out_of_memory(); }; if (caml_string_length(_filename) > 0) { filename = strndup(String_val(_filename), 
caml_string_length(_filename)); if (!filename) { if (short_name) free(short_name); caml_raise_out_of_memory(); } } if (caml_string_length(_mime) > 0) { mime = strndup(String_val(_mime), caml_string_length(_mime)); if (!mime) { if (short_name) free(short_name); if (filename) free(filename); caml_raise_out_of_memory(); } } caml_release_runtime_system(); guessed = av_guess_format(short_name, filename, mime); caml_acquire_runtime_system(); if (short_name) free(short_name); if (filename) free(filename); if (mime) free(mime); if (!guessed) CAMLreturn(Val_none); ans = caml_alloc_tuple(1); Store_field(ans, 0, value_of_outputFormat(guessed)); CAMLreturn(ans); } CAMLprim value ocaml_av_output_format_get_name(value _format) { CAMLparam1(_format); const char *n = OutputFormat_val(_format)->name; CAMLreturn(caml_copy_string(n ? n : "")); } CAMLprim value ocaml_av_output_format_get_long_name(value _format) { CAMLparam1(_format); const char *n = OutputFormat_val(_format)->long_name; CAMLreturn(caml_copy_string(n ? n : "")); } CAMLprim value ocaml_av_output_format_get_audio_codec_id(value _output_format) { CAMLparam1(_output_format); CAMLreturn(Val_AudioCodecID(OutputFormat_val(_output_format)->audio_codec)); } CAMLprim value ocaml_av_output_format_get_video_codec_id(value _output_format) { CAMLparam1(_output_format); CAMLreturn(Val_VideoCodecID(OutputFormat_val(_output_format)->video_codec)); } CAMLprim value ocaml_av_output_format_get_subtitle_codec_id(value _output_format) { CAMLparam1(_output_format); CAMLreturn( Val_SubtitleCodecID(OutputFormat_val(_output_format)->subtitle_codec)); } static av_t *open_output(avioformat_const AVOutputFormat *format, char *file_name, AVIOContext *avio_context, value _interrupt, int interleaved, AVDictionary **options) { int ret; AVIOInterruptCB interrupt_cb = {ocaml_av_interrupt_callback, NULL}; AVIOInterruptCB *interrupt_cb_ptr = NULL; av_t *av = (av_t *)calloc(1, sizeof(av_t)); if (!av) { if (file_name) free(file_name); av_dict_free(options); caml_raise_out_of_memory(); } av->closed = 0; if (interleaved) { av->write_frame = &av_interleaved_write_frame; } else { av->write_frame = &av_write_frame; } if (_interrupt != Val_none) { av->interrupt_cb = Some_val(_interrupt); caml_register_generational_global_root(&av->interrupt_cb); interrupt_cb.opaque = (void *)av; interrupt_cb_ptr = &interrupt_cb; } else { av->interrupt_cb = Val_none; } ret = avformat_alloc_output_context2(&av->format_context, format, NULL, file_name); if (ret < 0) { if (file_name) free(file_name); av_dict_free(options); free(av); ocaml_avutil_raise_error(ret); } ret = av_opt_set_dict(av->format_context, options); if (ret < 0) { if (av->interrupt_cb != Val_none) { caml_remove_generational_global_root(&av->interrupt_cb); av->interrupt_cb = Val_none; } free(av); if (file_name) free(file_name); av_dict_free(options); free(av); ocaml_avutil_raise_error(ret); } if (av->format_context->priv_data) { ret = av_opt_set_dict(av->format_context->priv_data, options); if (ret < 0) { free(av); if (file_name) free(file_name); av_dict_free(options); ocaml_avutil_raise_error(ret); } } // open the output file, if needed if (avio_context) { if (av->format_context->oformat->flags & AVFMT_NOFILE) { free(av); if (file_name) free(file_name); av_dict_free(options); av_dict_free(options); Fail("Cannot set custom I/O on this format!"); } av->format_context->pb = avio_context; av->custom_io = 1; } else { if (!(av->format_context->oformat->flags & AVFMT_NOFILE)) { caml_release_runtime_system(); int err = 
avio_open2(&av->format_context->pb, file_name, AVIO_FLAG_WRITE, interrupt_cb_ptr, options); caml_acquire_runtime_system(); if (err < 0) { if (av->interrupt_cb != Val_none) { caml_remove_generational_global_root(&av->interrupt_cb); av->interrupt_cb = Val_none; } free(av); if (file_name) free(file_name); av_dict_free(options); av_dict_free(options); ocaml_avutil_raise_error(err); } av->custom_io = 0; } } if (file_name) free(file_name); return av; } CAMLprim value ocaml_av_open_output(value _interrupt, value _format, value _filename, value _interleaved, value _opts) { CAMLparam3(_interrupt, _filename, _opts); CAMLlocal3(ans, ret, unused); char *filename = strndup(String_val(_filename), caml_string_length(_filename)); avioformat_const AVOutputFormat *format = NULL; AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } if (_format != Val_none) format = OutputFormat_val(Some_val(_format)); // open output file av_t *av = open_output(format, filename, NULL, _interrupt, Bool_val(_interleaved), &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); // allocate format context ans = caml_alloc_custom(&av_ops, sizeof(av_t *), 0, 1); Av_base_val(ans) = av; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_av_open_output_format(value _format, value _interleaved, value _opts) { CAMLparam2(_format, _opts); CAMLlocal3(ans, ret, unused); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } avioformat_const AVOutputFormat *format = OutputFormat_val(_format); // open output format av_t *av = open_output(format, NULL, NULL, Val_none, Bool_val(_interleaved), &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); // allocate format context ans = caml_alloc_custom(&av_ops, sizeof(av_t *), 0, 1); Av_base_val(ans) = av; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_av_open_output_stream(value _format, value _avio, value _interleaved, value _opts) { CAMLparam3(_format, _avio, _opts); CAMLlocal3(ans, ret, unused); avioformat_const AVOutputFormat *format = OutputFormat_val(_format); avio_t *avio = Avio_val(_avio); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } // open output format av_t *av = open_output(format, NULL, avio->avio_context, Val_none, Bool_val(_interleaved), &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); // allocate format context ans = caml_alloc_custom(&av_ops, sizeof(av_t *), 0, 1); Av_base_val(ans) = av; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_av_header_written(value _av) { CAMLparam1(_av); av_t *av = Av_val(_av); CAMLreturn(Val_bool(av->header_written)); } CAMLprim value ocaml_av_set_metadata(value _av, value _stream_index, value _tags) { CAMLparam2(_av, _tags); CAMLlocal1(pair); av_t *av = Av_val(_av); int index = Int_val(_stream_index); AVDictionary *metadata = NULL; if (!av->format_context) Fail("Failed to set metadata to closed output"); if (av->header_written) Fail("Failed to set metadata : header already written"); int i, ret, len = Wosize_val(_tags); av_dict_free(&metadata); for (i = 0; i < len; i++) { pair = Field(_tags, i); ret = av_dict_set(&metadata, String_val(Field(pair, 0)), String_val(Field(pair, 1)), 0); if (ret < 0) ocaml_avutil_raise_error(ret); } if (index < 0) { av->format_context->metadata = metadata; } else { av->format_context->streams[index]->metadata = metadata; } CAMLreturn(Val_unit); } static stream_t *new_stream(av_t *av, const AVCodec *codec) { if (!av->format_context) Fail("Failed to add stream to closed output"); if (av->header_written) Fail("Failed to create new stream : header already written"); // Allocate streams array size_t streams_size = sizeof(stream_t *) * (av->format_context->nb_streams + 1); stream_t **streams = (stream_t **)realloc(av->streams, streams_size); if (!streams) caml_raise_out_of_memory(); streams[av->format_context->nb_streams] = NULL; av->streams = streams; stream_t *stream = allocate_stream_context(av, av->format_context->nb_streams, codec); if (!stream) caml_raise_out_of_memory(); AVStream *avstream = avformat_new_stream(av->format_context, codec); if (!avstream) { free(stream); caml_raise_out_of_memory(); } avstream->id = av->format_context->nb_streams - 1; return stream; } static void init_stream_encoder(AVBufferRef *device_ctx, AVBufferRef *frame_ctx, av_t *av, stream_t *stream, AVDictionary **options) { AVCodecContext *enc_ctx = stream->codec_context; int ret; // Some formats want stream headers to be separate. 
if (av->format_context->oformat->flags & AVFMT_GLOBALHEADER) enc_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; if (device_ctx) { enc_ctx->hw_device_ctx = av_buffer_ref(device_ctx); if (!enc_ctx->hw_device_ctx) { av_dict_free(options); caml_raise_out_of_memory(); } } if (frame_ctx) { enc_ctx->hw_frames_ctx = av_buffer_ref(frame_ctx); if (!enc_ctx->hw_frames_ctx) { av_dict_free(options); caml_raise_out_of_memory(); } } caml_release_runtime_system(); ret = avcodec_open2(enc_ctx, enc_ctx->codec, options); caml_acquire_runtime_system(); if (ret < 0) { av_dict_free(options); ocaml_avutil_raise_error(ret); } AVStream *avstream = av->format_context->streams[stream->index]; avstream->time_base = enc_ctx->time_base; ret = avcodec_parameters_from_context(avstream->codecpar, enc_ctx); if (ret < 0) { av_dict_free(options); ocaml_avutil_raise_error(ret); } } static stream_t *new_audio_stream(av_t *av, enum AVSampleFormat sample_fmt, int channels, const AVCodec *codec, AVDictionary **options) { stream_t *stream = new_stream(av, codec); AVCodecContext *enc_ctx = stream->codec_context; enc_ctx->sample_fmt = sample_fmt; enc_ctx->channels = channels; // Detect new API #ifdef AV_CHANNEL_LAYOUT_MONO av_channel_layout_default(&enc_ctx->ch_layout, channels); #endif init_stream_encoder(NULL, NULL, av, stream, options); return stream; } CAMLprim value ocaml_av_new_uninitialized_stream_copy(value _av) { CAMLparam1(_av); av_t *av = Av_val(_av); stream_t *stream = new_stream(av, NULL); CAMLreturn(Val_int(stream->index)); } CAMLprim value ocaml_av_initialize_stream_copy(value _av, value _stream_index, value _params) { CAMLparam2(_av, _params); av_t *av = Av_val(_av); AVStream *avstream = av->format_context->streams[Int_val(_stream_index)]; int err = avcodec_parameters_copy(avstream->codecpar, CodecParameters_val(_params)); if (err < 0) ocaml_avutil_raise_error(err); avstream->codecpar->codec_tag = 0; CAMLreturn(Val_unit); } CAMLprim value ocaml_av_new_audio_stream(value _av, value _sample_fmt, value _codec, value _channels, value _opts) { CAMLparam2(_av, _opts); CAMLlocal2(ans, unused); const AVCodec *codec = AvCodec_val(_codec); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } stream_t *stream = new_audio_stream(Av_val(_av), Int_val(_sample_fmt), Int_val(_channels), codec, &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ans = caml_alloc_tuple(2); Store_field(ans, 0, Val_int(stream->index)); Store_field(ans, 1, unused); CAMLreturn(ans); } static stream_t *new_video_stream(AVBufferRef *device_ctx, AVBufferRef *frame_ctx, av_t *av, const AVCodec *codec, AVDictionary **options) { stream_t *stream = new_stream(av, codec); init_stream_encoder(device_ctx, frame_ctx, av, stream, options); return stream; } CAMLprim value ocaml_av_new_video_stream(value _device_context, value _frame_context, value _av, value _codec, value _opts) { CAMLparam4(_device_context, _frame_context, _av, _opts); CAMLlocal2(ans, unused); const AVCodec *codec = AvCodec_val(_codec); AVBufferRef *device_ctx = NULL; AVBufferRef *frame_ctx = NULL; if (_device_context != Val_none) device_ctx = BufferRef_val(Some_val(_device_context)); if (_frame_context != Val_none) frame_ctx = BufferRef_val(Some_val(_frame_context)); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } stream_t *stream = new_video_stream(device_ctx, frame_ctx, Av_val(_av), codec, &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ans = caml_alloc_tuple(2); Store_field(ans, 0, Val_int(stream->index)); Store_field(ans, 1, unused); CAMLreturn(ans); } static stream_t *new_subtitle_stream(av_t *av, const AVCodec *codec, AVDictionary **options) { stream_t *stream = new_stream(av, codec); int ret = subtitle_header_default(stream->codec_context); if (ret < 0) { av_dict_free(options); ocaml_avutil_raise_error(ret); } init_stream_encoder(NULL, NULL, av, stream, options); return stream; } CAMLprim value ocaml_av_new_subtitle_stream(value _av, value _codec, value _opts) { CAMLparam2(_av, _opts); CAMLlocal2(ans, unused); const AVCodec *codec = AvCodec_val(_codec); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } stream_t *stream = new_subtitle_stream(Av_val(_av), codec, &options); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ans = caml_alloc_tuple(2); Store_field(ans, 0, Val_int(stream->index)); Store_field(ans, 1, unused); CAMLreturn(ans); } CAMLprim value ocaml_av_new_data_stream(value _av, value _codec_id, value _time_base) { CAMLparam2(_av, _time_base); CAMLlocal2(ans, unused); const enum AVCodecID codec_id = UnknownCodecID_val(_codec_id); av_t *av = Av_val(_av); stream_t *stream = new_stream(av, NULL); AVStream *s = av->format_context->streams[stream->index]; s->time_base = rational_of_value(_time_base); s->codecpar->codec_type = AVMEDIA_TYPE_DATA; s->codecpar->codec_id = codec_id; CAMLreturn(Val_int(stream->index)); } CAMLprim value ocaml_av_write_stream_packet(value _stream, value _time_base, value _packet) { CAMLparam3(_stream, _time_base, _packet); CAMLlocal1(_av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int ret = 0, stream_index = StreamIndex_val(_stream); AVPacket *packet = Packet_val(_packet); AVStream *avstream = av->format_context->streams[stream_index]; if (!av->streams) Fail("Failed to write in closed output"); if (!av->streams[stream_index]) caml_failwith("Internal error"); caml_release_runtime_system(); if (!av->header_written) { // write output file header ret = avformat_write_header(av->format_context, NULL); if (ret < 0) { caml_acquire_runtime_system(); ocaml_avutil_raise_error(ret); } av->header_written = 1; } packet->stream_index = stream_index; packet->pos = -1; av_packet_rescale_ts(packet, rational_of_value(_time_base), avstream->time_base); ret = av->write_frame(av->format_context, packet); av->streams[stream_index]->was_keyframe = packet->flags & AV_PKT_FLAG_KEY; caml_acquire_runtime_system(); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } static void write_frame(av_t *av, int stream_index, AVCodecContext *enc_ctx, AVFrame *frame) { AVStream *avstream = av->format_context->streams[stream_index]; AVFrame *hw_frame = NULL; int ret; caml_release_runtime_system(); if (!av->header_written) { // write output file header ret = avformat_write_header(av->format_context, NULL); if (ret < 0) { caml_acquire_runtime_system(); ocaml_avutil_raise_error(ret); } av->header_written = 1; } AVPacket *packet = av_packet_alloc(); if (!packet) { caml_acquire_runtime_system(); caml_raise_out_of_memory(); } packet->data = NULL; packet->size = 0; if (enc_ctx->hw_frames_ctx && frame) { hw_frame = av_frame_alloc(); if (!hw_frame) { av_packet_free(&packet); caml_acquire_runtime_system(); caml_raise_out_of_memory(); } ret = av_hwframe_get_buffer(enc_ctx->hw_frames_ctx, hw_frame, 0); if (ret < 0) { av_frame_free(&hw_frame); av_packet_free(&packet); caml_acquire_runtime_system(); ocaml_avutil_raise_error(ret); } if (!hw_frame->hw_frames_ctx) { av_frame_free(&hw_frame); av_packet_free(&packet); caml_acquire_runtime_system(); caml_raise_out_of_memory(); } ret = av_hwframe_transfer_data(hw_frame, frame, 0); if (ret < 0) { av_frame_free(&hw_frame); av_packet_free(&packet); caml_acquire_runtime_system(); 
ocaml_avutil_raise_error(ret); } frame = hw_frame; } // send the frame for encoding ret = avcodec_send_frame(enc_ctx, frame); if (!frame && ret == AVERROR_EOF) { av_packet_free(&packet); caml_acquire_runtime_system(); return; } if (frame && ret == AVERROR_EOF) { if (hw_frame) av_frame_free(&hw_frame); av_packet_free(&packet); caml_acquire_runtime_system(); ocaml_avutil_raise_error(ret); } if (ret < 0 && ret != AVERROR_EOF) { if (hw_frame) av_frame_free(&hw_frame); av_packet_free(&packet); caml_acquire_runtime_system(); ocaml_avutil_raise_error(ret); } int was_keyframe = 0; // read all the available output packets (in general there may be any number // of them while (ret >= 0) { ret = avcodec_receive_packet(enc_ctx, packet); if (ret < 0) break; if (packet->flags & AV_PKT_FLAG_KEY) was_keyframe = 1; packet->stream_index = stream_index; packet->pos = -1; av_packet_rescale_ts(packet, enc_ctx->time_base, avstream->time_base); ret = av->write_frame(av->format_context, packet); } if (hw_frame) av_frame_free(&hw_frame); av_packet_free(&packet); av->streams[stream_index]->was_keyframe = was_keyframe; caml_acquire_runtime_system(); if (!frame && ret == AVERROR_EOF) return; if (ret < 0 && ret != AVERROR(EAGAIN)) ocaml_avutil_raise_error(ret); } static void write_audio_frame(av_t *av, int stream_index, AVFrame *frame) { int err, frame_size; if (av->format_context->nb_streams < stream_index) Fail("Stream index not found!"); stream_t *stream = av->streams[stream_index]; if (!stream->codec_context) Fail("Could not find stream index"); AVCodecContext *enc_ctx = stream->codec_context; write_frame(av, stream_index, enc_ctx, frame); } static void write_video_frame(av_t *av, int stream_index, AVFrame *frame) { if (av->format_context->nb_streams < stream_index) Fail("Stream index not found!"); if (!av->streams) Fail("Failed to write in closed output"); stream_t *stream = av->streams[stream_index]; if (!stream->codec_context) Fail("Failed to write video frame with no encoder"); AVCodecContext *enc_ctx = stream->codec_context; write_frame(av, stream_index, enc_ctx, frame); } static void write_subtitle_frame(av_t *av, int stream_index, AVSubtitle *subtitle) { stream_t *stream = av->streams[stream_index]; if (av->format_context->nb_streams < stream_index) Fail("Stream index not found!"); AVStream *avstream = av->format_context->streams[stream->index]; AVCodecContext *enc_ctx = stream->codec_context; if (!stream->codec_context) Fail("Failed to write subtitle frame with no encoder"); int err; int size = 512; AVPacket *packet = av_packet_alloc(); if (!packet) { caml_raise_out_of_memory(); } packet->data = NULL; packet->size = 0; err = av_new_packet(packet, size); if (err < 0) { av_packet_free(&packet); ocaml_avutil_raise_error(err); } caml_release_runtime_system(); err = avcodec_encode_subtitle(stream->codec_context, packet->data, packet->size, subtitle); caml_acquire_runtime_system(); if (err < 0) { av_packet_free(&packet); ocaml_avutil_raise_error(err); } packet->pts = subtitle->pts; packet->duration = subtitle->end_display_time - subtitle->pts; packet->dts = subtitle->pts; av_packet_rescale_ts(packet, enc_ctx->time_base, avstream->time_base); packet->stream_index = stream_index; packet->pos = -1; caml_release_runtime_system(); err = av->write_frame(av->format_context, packet); caml_acquire_runtime_system(); av_packet_free(&packet); if (err < 0) ocaml_avutil_raise_error(err); } CAMLprim value ocaml_av_write_stream_frame(value _stream, value _frame) { CAMLparam2(_stream, _frame); CAMLlocal1(_av); _av = 
Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); if (!av->streams) Fail("Invalid input: no streams provided"); enum AVMediaType type = av->streams[index]->codec_context->codec_type; if (type == AVMEDIA_TYPE_AUDIO) { write_audio_frame(av, index, Frame_val(_frame)); } else if (type == AVMEDIA_TYPE_VIDEO) { write_video_frame(av, index, Frame_val(_frame)); } else if (type == AVMEDIA_TYPE_SUBTITLE) { write_subtitle_frame(av, index, Subtitle_val(_frame)); } CAMLreturn(Val_unit); } CAMLprim value ocaml_av_flush(value _av) { CAMLparam1(_av); av_t *av = Av_val(_av); int ret; if (!av->header_written) CAMLreturn(Val_unit); caml_release_runtime_system(); ret = av->write_frame(av->format_context, NULL); if (ret >= 0 && av->format_context->pb) avio_flush(av->format_context->pb); caml_acquire_runtime_system(); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_was_keyframe(value _stream) { CAMLparam1(_stream); CAMLlocal1(_av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); if (!av->streams) Fail("Invalid input: no streams provided"); CAMLreturn(Val_bool(av->streams[index]->was_keyframe)); } CAMLprim value ocaml_av_write_audio_frame(value _av, value _frame) { CAMLparam2(_av, _frame); av_t *av = Av_val(_av); AVFrame *frame = Frame_val(_frame); write_audio_frame(av, 0, frame); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_write_video_frame(value _av, value _frame) { CAMLparam2(_av, _frame); av_t *av = Av_val(_av); AVFrame *frame = Frame_val(_frame); write_video_frame(av, 0, frame); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_close(value _av) { CAMLparam1(_av); av_t *av = Av_val(_av); if (!av->is_input && av->streams) { // flush encoders of the output file unsigned int i; for (i = 0; i < av->format_context->nb_streams; i++) { AVCodecContext *enc_ctx = av->streams[i]->codec_context; if (!enc_ctx) continue; if (enc_ctx->codec_type == AVMEDIA_TYPE_AUDIO) { write_audio_frame(av, i, NULL); } else if (enc_ctx->codec_type == AVMEDIA_TYPE_VIDEO) { write_video_frame(av, i, NULL); } } // write the trailer if (av->header_written) { caml_release_runtime_system(); av_write_trailer(av->format_context); caml_acquire_runtime_system(); } } close_av(av); CAMLreturn(Val_unit); } CAMLprim value ocaml_av_cleanup_av(value _av) { CAMLparam1(_av); av_t *av = Av_base_val(_av); close_av(av); CAMLreturn(Val_unit); } // This is from libavformat/avc.h uint8_t *ocaml_av_ff_nal_unit_extract_rbsp(const uint8_t *src, uint32_t src_len, uint32_t *dst_len, int header_len) { uint8_t *dst; uint32_t i, len; dst = av_malloc(src_len + AV_INPUT_BUFFER_PADDING_SIZE); if (!dst) return NULL; /* NAL unit header */ i = len = 0; while (i < header_len && i < src_len) dst[len++] = src[i++]; while (i + 2 < src_len) if (!src[i] && !src[i + 1] && src[i + 2] == 3) { dst[len++] = src[i++]; dst[len++] = src[i++]; i++; // remove emulation_prevention_three_byte } else dst[len++] = src[i++]; while (i < src_len) dst[len++] = src[i++]; memset(dst + len, 0, AV_INPUT_BUFFER_PADDING_SIZE); *dst_len = len; return dst; } // This from libavformat/hlsenc.c CAMLprim value ocaml_av_codec_attr(value _stream) { CAMLparam1(_stream); CAMLlocal3(ans, _attr, _av); char attr[32]; _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); if (!av->format_context || !av->format_context->streams) CAMLreturn(Val_none); AVStream *stream = av->format_context->streams[index]; if (!stream) CAMLreturn(Val_none); if (stream->codecpar->codec_id == 
AV_CODEC_ID_H264) { uint8_t *data = stream->codecpar->extradata; if (data && (data[0] | data[1] | data[2]) == 0 && data[3] == 1 && (data[4] & 0x1F) == 7) { snprintf(attr, sizeof(attr), "avc1.%02x%02x%02x", data[5], data[6], data[7]); } else { goto fail; } } else if (stream->codecpar->codec_id == AV_CODEC_ID_FLAC) { snprintf(attr, sizeof(attr), "fLaC"); } else if (stream->codecpar->codec_id == AV_CODEC_ID_HEVC) { uint8_t *data = stream->codecpar->extradata; int profile = FF_PROFILE_UNKNOWN; int level = FF_LEVEL_UNKNOWN; if (stream->codecpar->profile != FF_PROFILE_UNKNOWN) profile = stream->codecpar->profile; if (stream->codecpar->level != FF_LEVEL_UNKNOWN) level = stream->codecpar->level; /* check the boundary of data which from current position is small than * extradata_size */ while (data && (data - stream->codecpar->extradata + 19) < stream->codecpar->extradata_size) { /* get HEVC SPS NAL and seek to profile_tier_level */ if (!(data[0] | data[1] | data[2]) && data[3] == 1 && ((data[4] & 0x7E) == 0x42)) { uint8_t *rbsp_buf; int remain_size = 0; uint32_t rbsp_size = 0; /* skip start code + nalu header */ data += 6; /* process by reference General NAL unit syntax */ remain_size = stream->codecpar->extradata_size - (data - stream->codecpar->extradata); rbsp_buf = ocaml_av_ff_nal_unit_extract_rbsp(data, remain_size, &rbsp_size, 0); if (!rbsp_buf) goto fail; if (rbsp_size < 13) { av_freep(&rbsp_buf); goto fail; } /* skip sps_video_parameter_set_id u(4), * sps_max_sub_layers_minus1 u(3), * and sps_temporal_id_nesting_flag u(1) */ profile = rbsp_buf[1] & 0x1f; /* skip 8 + 8 + 32 + 4 + 43 + 1 bit */ level = rbsp_buf[12]; av_freep(&rbsp_buf); goto fail; } data++; } if (stream->codecpar->codec_tag == MKTAG('h', 'v', 'c', '1') && profile != FF_PROFILE_UNKNOWN && level != FF_LEVEL_UNKNOWN) { snprintf(attr, sizeof(attr), "%s.%d.4.L%d.B01", av_fourcc2str(stream->codecpar->codec_tag), profile, level); } else snprintf(attr, sizeof(attr), "%s", av_fourcc2str(stream->codecpar->codec_tag)); } else if (stream->codecpar->codec_id == AV_CODEC_ID_MP2) { snprintf(attr, sizeof(attr), "mp4a.40.33"); } else if (stream->codecpar->codec_id == AV_CODEC_ID_MP3) { snprintf(attr, sizeof(attr), "mp4a.40.34"); } else if (stream->codecpar->codec_id == AV_CODEC_ID_AAC) { if (stream->codecpar->profile != FF_PROFILE_UNKNOWN) snprintf(attr, sizeof(attr), "mp4a.40.%d", stream->codecpar->profile + 1); else goto fail; } else if (stream->codecpar->codec_id == AV_CODEC_ID_AC3) { snprintf(attr, sizeof(attr), "ac-3"); } else if (stream->codecpar->codec_id == AV_CODEC_ID_EAC3) { snprintf(attr, sizeof(attr), "ec-3"); } else { fail: CAMLreturn(Val_none); } _attr = caml_copy_string(attr); ans = caml_alloc_tuple(1); Store_field(ans, 0, _attr); CAMLreturn(ans); } CAMLprim value ocaml_av_stream_bitrate(value _stream) { CAMLparam1(_stream); CAMLlocal2(ans, _av); _av = Field(_stream, 0); av_t *av = Av_val(_av); int index = StreamIndex_val(_stream); if (!av->format_context || !av->format_context->streams) CAMLreturn(Val_none); AVStream *stream = av->format_context->streams[index]; if (!stream) CAMLreturn(Val_none); AVCPBProperties *props = (AVCPBProperties *)av_stream_get_side_data( stream, AV_PKT_DATA_CPB_PROPERTIES, NULL); if (!stream->codecpar->bit_rate && !props) CAMLreturn(Val_none); ans = caml_alloc_tuple(1); if (stream->codecpar->bit_rate) { Store_field(ans, 0, Val_int(stream->codecpar->bit_rate)); } else if (props) { Store_field(ans, 0, Val_int(props->max_bitrate)); } CAMLreturn(ans); } 
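The two stubs above back the HLS-oriented helpers of the OCaml Av module. A minimal usage sketch, assuming they are exposed on the OCaml side as Av.codec_attr and Av.bitrate returning options (names inferred from the stub registrations; the exact signatures live in av.mli):

(* Hypothetical sketch: print the RFC 6381 CODECS attribute and the bitrate of
   the best video stream of an input file. [Av.codec_attr] and [Av.bitrate]
   are assumed to be the OCaml bindings of the two stubs above. *)
let () =
  let input = Av.open_input Sys.argv.(1) in
  let _, stream, _ = Av.find_best_video_stream input in
  (match Av.codec_attr stream with
    | Some attr -> print_endline ("CODECS=" ^ attr) (* e.g. "avc1.640028" *)
    | None -> print_endline "no codec attribute");
  (match Av.bitrate stream with
    | Some rate -> Printf.printf "bitrate: %d b/s\n" rate
    | None -> print_endline "unknown bitrate");
  Av.close input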
ocaml-ffmpeg-1.1.11/av/av_stubs.h000066400000000000000000000017011457634536500165600ustar00rootroot00000000000000#ifndef _AV_STUBS_H_ #define _AV_STUBS_H_ #include <caml/mlvalues.h> #include <libavformat/avformat.h> AVFormatContext *ocaml_av_get_format_context(value *p_av); #if LIBAVFORMAT_VERSION_INT <= AV_VERSION_INT(59, 0, 100) #define avioformat_const #else #define avioformat_const const #endif /***** AVInputFormat *****/ #define InputFormat_val(v) (*(avioformat_const AVInputFormat**)Data_abstract_val(v)) void value_of_inputFormat(avioformat_const AVInputFormat *inputFormat, value * p_value); /***** AVOutputFormat *****/ #define OutputFormat_val(v) (*(avioformat_const AVOutputFormat**)Data_abstract_val(v)) value value_of_outputFormat(avioformat_const AVOutputFormat *outputFormat); /***** Control message *****/ value * ocaml_av_get_control_message_callback(struct AVFormatContext *ctx); void ocaml_av_set_control_message_callback(value *p_av, av_format_control_message c_callback, value *p_ocaml_callback); #endif // _AV_STUBS_H_ ocaml-ffmpeg-1.1.11/av/config/000077500000000000000000000000001457634536500160255ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/av/config/discover.ml000066400000000000000000000020611457634536500201740ustar00rootroot00000000000000module C = Configurator.V1 let packages = [("avutil", "55.78.100"); ("avformat", "57.83.100")] let () = C.main ~name:"ffmpeg-av-pkg-config" (fun c -> let default : C.Pkg_config.package_conf = { libs = List.map (fun (p, _) -> Printf.sprintf "-l%s" p) packages; cflags = []; } in let conf = match C.Pkg_config.get c with | None -> default | Some pc -> ( let package = String.concat " " (List.map (fun (p, _) -> Printf.sprintf "lib%s" p) packages) in let expr = String.concat ", " (List.map (fun (p, v) -> Printf.sprintf "lib%s >= %s" p v) packages) in match C.Pkg_config.query_expr_err pc ~package ~expr with | Error msg -> failwith msg | Ok deps -> deps) in C.Flags.write_sexp "c_flags.sexp" conf.cflags; C.Flags.write_sexp "c_library_flags.sexp" conf.libs) ocaml-ffmpeg-1.1.11/av/config/dune000066400000000000000000000000751457634536500167050ustar00rootroot00000000000000(executable (name discover) (libraries dune.configurator)) ocaml-ffmpeg-1.1.11/av/dune000066400000000000000000000006161457634536500154410ustar00rootroot00000000000000(library (name av) (public_name ffmpeg-av) (synopsis "Bindings to ffmpeg's av library") (foreign_stubs (language c) (names av_stubs) (flags (:include c_flags.sexp))) (c_library_flags (:include c_library_flags.sexp)) (install_c_headers av_stubs) (libraries ffmpeg-avutil ffmpeg-avcodec)) (rule (targets c_flags.sexp c_library_flags.sexp) (action (run ./config/discover.exe))) ocaml-ffmpeg-1.1.11/avcodec/000077500000000000000000000000001457634536500155565ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avcodec/avcodec.ml000066400000000000000000000513651457634536500175260ustar00rootroot00000000000000open Avutil module Ba = Bigarray.Array1 type ('media, 'mode) codec type 'media params type 'media decoder type 'media encoder type encode = [ `Encoder ] type decode = [ `Decoder ] type profile = { id : int; profile_name : string } type descriptor = { media_type : Avutil.media_type; name : string; long_name : string option; properties : Codec_properties.t list; mime_types : string list; profiles : profile list; } external flag_qscale : unit -> int = "ocaml_avcodec_flag_qscale" let flag_qscale = flag_qscale () external params : 'media encoder -> 'media params = "ocaml_avcodec_encoder_params" external time_base : 'media encoder -> Avutil.rational =
"ocaml_avcodec_encoder_time_base" external get_name : _ codec -> string = "ocaml_avcodec_get_name" external get_description : _ codec -> string = "ocaml_avcodec_get_description" external init : unit -> unit = "ocaml_avcodec_init" [@@noalloc] let () = init () external get_next_codec : unit option -> (('a, 'b) codec * 'c * bool * unit option) option = "ocaml_avcodec_get_next_codec" let all_codecs = let rec f cur h = match get_next_codec h with | None -> cur | Some (codec, id, is_encoder, h) -> f ((codec, id, is_encoder) :: cur) h in f [] None external get_input_buffer_padding_size : unit -> int = "ocaml_avcodec_get_input_buffer_padding_size" let input_buffer_padding_size = get_input_buffer_padding_size () let empty_data = create_data 0 external name : _ codec -> string = "ocaml_avcodec_name" type capability = Codec_capabilities.t (* To be used with Audio.t and Video.t *) external capabilities : ([< `Audio | `Video ], encode) codec -> capability array = "ocaml_avcodec_capabilities" let capabilities c = Array.to_list (capabilities c) let mk_descriptor d = Option.map (fun (media_type, name, long_name, properties, mime_types, profiles) -> { media_type; name; long_name; properties = Array.to_list properties; mime_types = Array.to_list mime_types; profiles = Array.to_list profiles; }) d external params_descriptor : 'media params -> (Avutil.media_type * string * string option * Codec_properties.t array * string array * profile array) option = "ocaml_avcodec_params_descriptor" let descriptor p = mk_descriptor (params_descriptor p) type hw_config_method = Hw_config_method.t type hw_config = { pixel_format : Pixel_format.t; methods : hw_config_method list; device_type : HwContext.device_type; } external hw_configs : ([< `Audio | `Video ], _) codec -> hw_config list = "ocaml_avcodec_hw_methods" (** Packet. 
*) module Packet = struct (** Packet type *) type 'a t external create : string -> 'a t = "ocaml_avcodec_create_packet" external content : 'a t -> string = "ocaml_avcodec_packet_content" type flag = [ `Keyframe | `Corrupt | `Discard | `Trusted | `Disposable ] external int_of_flag : flag -> int = "ocaml_avcodec_int_of_flag" external get_flags : 'a t -> int = "ocaml_avcodec_get_flags" let get_flags p = let flags = get_flags p in List.fold_left (fun cur flag -> if int_of_flag flag land flags <> 0 then flag :: cur else cur) [] [`Keyframe; `Corrupt; `Discard; `Trusted; `Disposable] type replaygain = { track_gain : int; track_peak : int; album_gain : int; album_peak : int; } type side_data = [ `Replaygain of replaygain | `Strings_metadata of (string * string) list | `Metadata_update of (string * string) list ] type _side_data = [ `Replaygain of replaygain | `Strings_metadata of string | `Metadata_update of string ] external add_side_data : 'media t -> _side_data -> unit = "ocaml_avcodec_packet_add_side_data" let concat_meta m = String.concat "\000" (List.map (fun (k, v) -> k ^ "\000" ^ v) m) let add_side_data p d = let d = match d with | `Replaygain r -> `Replaygain r | `Strings_metadata m -> `Strings_metadata (concat_meta m) | `Metadata_update m -> `Metadata_update (concat_meta m) in add_side_data p d external side_data : _ t -> _side_data array = "ocaml_avcodec_packet_side_data" let split_metadata s = let rec split_meta m = function | [_] | [] -> m | k :: v :: l -> split_meta ((k, v) :: m) l in split_meta [] (String.split_on_char '\000' s) let side_data packet = List.map (function | `Replaygain r -> `Replaygain r | `Strings_metadata s -> `Strings_metadata (split_metadata s) | `Metadata_update s -> `Metadata_update (split_metadata s)) (Array.to_list (side_data packet)) external dup : 'a t -> 'a t = "ocaml_avcodec_packet_dup" external get_size : 'a t -> int = "ocaml_avcodec_get_packet_size" external get_stream_index : 'a t -> int = "ocaml_avcodec_get_packet_stream_index" external set_stream_index : 'a t -> int -> unit = "ocaml_avcodec_set_packet_stream_index" external get_pts : 'a t -> Int64.t option = "ocaml_avcodec_get_packet_pts" external set_pts : 'a t -> Int64.t option -> unit = "ocaml_avcodec_set_packet_pts" external get_dts : 'a t -> Int64.t option = "ocaml_avcodec_get_packet_dts" external set_dts : 'a t -> Int64.t option -> unit = "ocaml_avcodec_set_packet_dts" external get_duration : 'a t -> Int64.t option = "ocaml_avcodec_get_packet_duration" external set_duration : 'a t -> Int64.t option -> unit = "ocaml_avcodec_set_packet_duration" external get_position : 'a t -> Int64.t option = "ocaml_avcodec_get_packet_position" external set_position : 'a t -> Int64.t option -> unit = "ocaml_avcodec_set_packet_position" external to_bytes : 'a t -> bytes = "ocaml_avcodec_packet_to_bytes" type 'a parser_t type 'a parser = { mutable buf : data; mutable remainder : data; parser : 'a parser_t; } (* This is an internal function, which receives any type of AVCodec in the C code. 
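The typed wrappers exposed from the [Audio] and [Video] modules below constrain it to a matching codec and media type.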
*) external create_parser : 'a params option -> 'b -> 'a parser_t = "ocaml_avcodec_create_parser" let create_parser ?params codec = { buf = empty_data; remainder = empty_data; parser = create_parser params codec; } external parse_packet : 'a parser_t -> data -> int -> int -> ('m t * int) option = "ocaml_avcodec_parse_packet" let rec buf_loop ctx f buf ofs len = match parse_packet ctx.parser buf ofs len with | Some (pkt, l) -> f pkt; buf_loop ctx f buf (ofs + l) (len - l) | None -> ofs let parse_data ctx f data = let remainder_len = Ba.dim ctx.remainder in let data_len = Ba.dim data in let actual_len = remainder_len + data_len in let needed_len = actual_len + input_buffer_padding_size in let buf_len = Ba.dim ctx.buf in let buf = if needed_len > buf_len then create_data needed_len else ctx.buf in if remainder_len > 0 then Ba.blit ctx.remainder (Ba.sub buf 0 remainder_len); Ba.blit data (Ba.sub buf remainder_len data_len); if needed_len <> buf_len then Ba.fill (Ba.sub buf actual_len input_buffer_padding_size) 0; let parsed_len = buf_loop ctx f buf 0 actual_len in ctx.buf <- buf; ctx.remainder <- Ba.sub buf parsed_len (actual_len - parsed_len) let parse_bytes ctx f bytes len = let data = create_data len in for i = 0 to len - 1 do data.{i} <- int_of_char (Bytes.get bytes i) done; parse_data ctx f data end (* These functions receive AVCodecParameters and AVCodec on the C side. *) external create_decoder : ?params:'a params -> 'b -> 'a decoder = "ocaml_avcodec_create_decoder" (** Audio codecs. *) module Audio = struct (** Audio codec ids *) type 'mode t = (audio, 'mode) codec type id = Codec_id.audio external audio_descriptor : id -> (Avutil.media_type * string * string option * Codec_properties.t array * string array * profile array) option = "ocaml_avcodec_audio_descriptor" let descriptor id = mk_descriptor (audio_descriptor id) let codec_ids = Codec_id.audio let get_name = get_name let get_description = get_description let encoders = List.filter_map (function | c, Some id, true when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs let decoders = List.filter_map (function | c, Some id, false when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs external frame_size : audio encoder -> int = "ocaml_avcodec_frame_size" external get_id : _ t -> id = "ocaml_avcodec_get_audio_codec_id" external string_of_id : id -> string = "ocaml_avcodec_get_audio_codec_id_name" external find_encoder_by_name : string -> [ `Encoder ] t = "ocaml_avcodec_find_audio_encoder_by_name" external find_encoder : id -> [ `Encoder ] t = "ocaml_avcodec_find_audio_encoder" external find_decoder_by_name : string -> [ `Decoder ] t = "ocaml_avcodec_find_audio_decoder_by_name" external find_decoder : id -> [ `Decoder ] t = "ocaml_avcodec_find_audio_decoder" external get_supported_channel_layouts : _ t -> Avutil.Channel_layout.t list = "ocaml_avcodec_get_supported_channel_layouts" let get_supported_channel_layouts codec = List.rev (get_supported_channel_layouts codec) let find_best_channel_layout codec default = try let channel_layouts = get_supported_channel_layouts codec in if List.mem default channel_layouts then default else (match channel_layouts with h :: _ -> h | [] -> default) with Not_found -> default external get_supported_sample_formats : _ t -> Avutil.Sample_format.t list = "ocaml_avcodec_get_supported_sample_formats" let get_supported_sample_formats codec = List.rev (get_supported_sample_formats codec) let find_best_sample_format codec default = try let formats = 
get_supported_sample_formats codec in if List.mem default formats then default else (match formats with h :: _ -> h | [] -> default) with Not_found -> default external get_supported_sample_rates : _ t -> int list = "ocaml_avcodec_get_supported_sample_rates" let get_supported_sample_rates codec = List.rev (get_supported_sample_rates codec) let find_best_sample_rate codec default = let rates = get_supported_sample_rates codec in if List.mem default rates then default else (match rates with h :: _ -> h | [] -> default) external get_params_id : audio params -> id = "ocaml_avcodec_parameters_get_audio_codec_id" external get_channel_layout : audio params -> Avutil.Channel_layout.t = "ocaml_avcodec_parameters_get_channel_layout" external get_nb_channels : audio params -> int = "ocaml_avcodec_parameters_get_nb_channels" external get_sample_format : audio params -> Avutil.Sample_format.t = "ocaml_avcodec_parameters_get_sample_format" external get_bit_rate : audio params -> int = "ocaml_avcodec_parameters_get_bit_rate" external get_sample_rate : audio params -> int = "ocaml_avcodec_parameters_get_sample_rate" let create_parser = Packet.create_parser let create_decoder = create_decoder external sample_format : audio decoder -> Sample_format.t = "ocaml_avcodec_sample_format" external create_encoder : int -> [ `Encoder ] t -> int -> (string * string) array -> audio encoder * string array = "ocaml_avcodec_create_audio_encoder" let create_encoder ?opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base codec = let opts = opts_default opts in let _opts = mk_audio_opts ~opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base () in let channels = match (channels, channel_layout) with | Some n, _ -> n | None, Some layout -> Avutil.Channel_layout.get_nb_channels layout | None, None -> raise (Error (`Failure "At least one of channels or channel_layout must be passed!")) in let encoder, unused = create_encoder (Sample_format.get_id sample_format) codec channels (mk_opts_array _opts) in filter_opts unused opts; encoder end (** Video codecs. 
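For example (a minimal sketch, assuming the libx264 encoder is available in the linked FFmpeg):
{[
let codec = Avcodec.Video.find_encoder_by_name "libx264" in
let encoder =
  Avcodec.Video.create_encoder ~pixel_format:`Yuv420p ~width:1280 ~height:720
    ~time_base:{ Avutil.num = 1; den = 25 } codec
in
Printf.printf "encoder time base: %d/%d\n"
  (Avcodec.time_base encoder).Avutil.num
  (Avcodec.time_base encoder).Avutil.den
]}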
*) module Video = struct type 'mode t = (video, 'mode) codec type id = Codec_id.video external video_descriptor : id -> (Avutil.media_type * string * string option * Codec_properties.t array * string array * profile array) option = "ocaml_avcodec_video_descriptor" let descriptor id = mk_descriptor (video_descriptor id) let codec_ids = Codec_id.video let encoders = List.filter_map (function | c, Some id, true when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs let decoders = List.filter_map (function | c, Some id, false when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs let get_name = get_name let get_description = get_description external get_id : _ t -> id = "ocaml_avcodec_get_video_codec_id" external string_of_id : id -> string = "ocaml_avcodec_get_video_codec_id_name" external find_encoder_by_name : string -> [ `Encoder ] t = "ocaml_avcodec_find_video_encoder_by_name" external find_encoder : id -> [ `Encoder ] t = "ocaml_avcodec_find_video_encoder" external find_decoder_by_name : string -> [ `Decoder ] t = "ocaml_avcodec_find_video_decoder_by_name" external find_decoder : id -> [ `Decoder ] t = "ocaml_avcodec_find_video_decoder" external get_supported_frame_rates : _ t -> Avutil.rational list = "ocaml_avcodec_get_supported_frame_rates" let get_supported_frame_rates codec = List.rev (get_supported_frame_rates codec) let find_best_frame_rate codec default = let frame_rates = get_supported_frame_rates codec in if List.mem default frame_rates then default else (match frame_rates with h :: _ -> h | [] -> default) external get_supported_pixel_formats : _ t -> Avutil.Pixel_format.t list = "ocaml_avcodec_get_supported_pixel_formats" let get_supported_pixel_formats codec = List.rev (get_supported_pixel_formats codec) let find_best_pixel_format ?(hwaccel = false) codec default = let formats = get_supported_pixel_formats codec in if List.mem default formats then default else ( let formats = if hwaccel then formats else List.filter (fun f -> not (List.mem `Hwaccel Avutil.Pixel_format.((descriptor f).flags))) formats in match formats with p :: _ -> p | [] -> default) external get_params_id : video params -> id = "ocaml_avcodec_parameters_get_video_codec_id" external get_width : video params -> int = "ocaml_avcodec_parameters_get_width" external get_height : video params -> int = "ocaml_avcodec_parameters_get_height" external get_sample_aspect_ratio : video params -> Avutil.rational = "ocaml_avcodec_parameters_get_sample_aspect_ratio" external get_pixel_format : video params -> Avutil.Pixel_format.t option = "ocaml_avcodec_parameters_get_pixel_format" external get_pixel_aspect : video params -> Avutil.rational option = "ocaml_avcodec_parameters_get_pixel_aspect" external get_bit_rate : video params -> int = "ocaml_avcodec_parameters_get_bit_rate" let create_parser = Packet.create_parser let create_decoder = create_decoder type hardware_context = [ `Device_context of HwContext.device_context | `Frame_context of HwContext.frame_context ] external create_encoder : ?device_context:Avutil.HwContext.device_context -> ?frame_context:Avutil.HwContext.frame_context -> int -> [ `Encoder ] t -> (string * string) array -> video encoder * string array = "ocaml_avcodec_create_video_encoder" let create_encoder ?opts ?frame_rate ?hardware_context ~pixel_format ~width ~height ~time_base codec = let opts = opts_default opts in let _opts = mk_video_opts ~opts ?frame_rate ~pixel_format ~width ~height ~time_base () in let device_context, frame_context = match hardware_context 
with | None -> (None, None) | Some (`Device_context hardware_context) -> (Some hardware_context, None) | Some (`Frame_context frame_context) -> (None, Some frame_context) in let encoder, unused = create_encoder ?device_context ?frame_context (Pixel_format.get_id pixel_format) codec (mk_opts_array _opts) in filter_opts unused opts; encoder end (** Subtitle codecs. *) module Subtitle = struct type 'mode t = (subtitle, 'mode) codec type id = Codec_id.subtitle external subtitle_descriptor : id -> (Avutil.media_type * string * string option * Codec_properties.t array * string array * profile array) option = "ocaml_avcodec_subtitle_descriptor" let descriptor id = mk_descriptor (subtitle_descriptor id) let codec_ids = Codec_id.subtitle let encoders = List.filter_map (function | c, Some id, true when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs let decoders = List.filter_map (function | c, Some id, false when List.mem id codec_ids -> Some (Obj.magic c) | _ -> None) all_codecs let get_name = get_name let get_description = get_description external get_id : _ t -> id = "ocaml_avcodec_get_subtitle_codec_id" external string_of_id : id -> string = "ocaml_avcodec_get_subtitle_codec_id_name" external find_encoder_by_name : string -> [ `Encoder ] t = "ocaml_avcodec_find_subtitle_encoder_by_name" external find_encoder : id -> [ `Encoder ] t = "ocaml_avcodec_find_subtitle_encoder" external find_decoder_by_name : string -> [ `Decoder ] t = "ocaml_avcodec_find_subtitle_decoder_by_name" external find_decoder : id -> [ `Decoder ] t = "ocaml_avcodec_find_subtitle_decoder" external get_params_id : subtitle params -> id = "ocaml_avcodec_parameters_get_subtitle_codec_id" end module Unknown = struct type 'mode t = ([ `Data ], 'mode) codec type id = Codec_id.unknown let codec_ids = Codec_id.unknown external get_params_id : [ `Data ] params -> id = "ocaml_avcodec_parameters_get_unknown_codec_id" external string_of_id : id -> string = "ocaml_avcodec_get_unknown_codec_id_name" end external _send_packet : 'media decoder -> 'media Packet.t -> unit = "ocaml_avcodec_send_packet" external _receive_frame : 'media decoder -> 'media frame option = "ocaml_avcodec_receive_frame" external _flush_decoder : 'media decoder -> unit = "ocaml_avcodec_flush_decoder" let rec receive_frame decoder f = match _receive_frame decoder with | Some frame -> f frame; receive_frame decoder f | None -> () let decode decoder f packet = _send_packet decoder packet; receive_frame decoder f let flush_decoder decoder f = try _flush_decoder decoder; receive_frame decoder f with Avutil.Error `Eof -> () external _send_frame : 'media encoder -> 'media frame -> unit = "ocaml_avcodec_send_frame" external _receive_packet : 'media encoder -> 'media Packet.t option = "ocaml_avcodec_receive_packet" external _flush_encoder : 'media encoder -> unit = "ocaml_avcodec_flush_encoder" let rec receive_packet encoder f = match _receive_packet encoder with | Some packet -> f packet; receive_packet encoder f | None -> () let encode encoder f frame = _send_frame encoder frame; receive_packet encoder f let flush_encoder encoder f = (* First flush remaining packets. 
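Packets produced by frames that were already sent are drained first; [_flush_encoder] then signals end-of-stream to the underlying encoder, and the remaining delayed packets are drained in turn.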
*) receive_packet encoder f; _flush_encoder encoder; receive_packet encoder f type id = Codec_id.codec_id external string_of_id : id -> string = "ocaml_avcodec_get_codec_id_name" module BitstreamFilter = struct type filter = { name : string; codecs : id list; options : Avutil.Options.t } type 'a t type cursor external get_next : cursor option -> (string * id array * Avutil.Options.t * cursor) option = "ocaml_avcodec_bsf_next" let get_next cursor = Option.map (fun (name, codecs, options, cursor) -> ({ name; codecs = Array.to_list codecs; options }, cursor)) (get_next cursor) let filters = let rec f cursor filters = match get_next cursor with | None -> filters | Some (filter, cursor) -> f (Some cursor) (filter :: filters) in f None [] external init : (string * string) array -> string -> 'a params -> 'a t * 'a params * string array = "ocaml_avcodec_bsf_init" let init ?opts { name; _ } params = let opts = opts_default opts in let filter, params, unused = init (mk_opts_array opts) name params in filter_opts unused opts; (filter, params) external send_packet : 'a t -> 'a Packet.t -> unit = "ocaml_avcodec_bsf_send_packet" external receive_packet : 'a t -> 'a Packet.t = "ocaml_avcodec_bsf_receive_packet" external send_eof : 'a t -> unit = "ocaml_avcodec_bsf_send_eof" end ocaml-ffmpeg-1.1.11/avcodec/avcodec.mli000066400000000000000000000375711457634536500177020ustar00rootroot00000000000000(** This module contains decoders and encoders for audio, video and subtitle codecs. *) open Avutil type ('media, 'mode) codec type 'media params type 'media decoder type 'media encoder type encode = [ `Encoder ] type decode = [ `Decoder ] type profile = { id : int; profile_name : string } type descriptor = { media_type : Avutil.media_type; name : string; long_name : string option; properties : Codec_properties.t list; mime_types : string list; profiles : profile list; } (** {5 Constants} *) val flag_qscale : int (** Get the params of a given encoder *) val params : 'media encoder -> 'media params (** Get the params descriptor. *) val descriptor : 'media params -> descriptor option (** Get the time base of a given encoder. *) val time_base : 'media encoder -> Avutil.rational (** Get the name of a given codec. *) val name : _ codec -> string (** Codec capabilities. *) type capability = Codec_capabilities.t (** Get the encoding capabilities for this codec. *) val capabilities : ([< `Audio | `Video ], encode) codec -> capability list (** Codec hardware config method. *) type hw_config_method = Hw_config_method.t (** Hardware config for the given codec. *) type hw_config = { pixel_format : Pixel_format.t; methods : hw_config_method list; device_type : HwContext.device_type; } (** Get the codec's hardware configs. *) val hw_configs : ([< `Audio | `Video ], _) codec -> hw_config list (** Packet. *) module Packet : sig (** Packet type *) type 'media t (** Parser type *) type 'media parser (** Packet flags *) type flag = [ `Keyframe | `Corrupt | `Discard | `Trusted | `Disposable ] type replaygain = { track_gain : int; track_peak : int; album_gain : int; album_peak : int; } (** Packet side_data (incomplete) *) type side_data = [ `Replaygain of replaygain | `Strings_metadata of (string * string) list | `Metadata_update of (string * string) list ] (** Add a side data to a packet. *) val add_side_data : 'media t -> side_data -> unit (** Return a packet side_data. *) val side_data : 'media t -> side_data list (** Return a fresh packet referring to the same data. *) val dup : 'media t -> 'media t (** Return the packet flags.
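For instance, keyframes can be detected with (a small sketch):
{[
let is_keyframe packet = List.mem `Keyframe (Avcodec.Packet.get_flags packet)
]}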
*) val get_flags : 'media t -> flag list (** Return the size of the packet. *) val get_size : 'media t -> int (** Return the stream index of the packet. *) val get_stream_index : 'media t -> int (** Set the stream index of the packet. *) val set_stream_index : 'media t -> int -> unit (** Return the packet PTS (Presentation Time) in its stream's base_time unit. *) val get_pts : 'media t -> Int64.t option (** Set the packet PTS (Presentation Time) in its stream's base_time unit. *) val set_pts : 'media t -> Int64.t option -> unit (** Return the packet DTS (Decoding Time) in its stream's base_time unit. *) val get_dts : 'media t -> Int64.t option (** Set the packet DTS (Decoding Time) in its stream's base_time unit. *) val set_dts : 'media t -> Int64.t option -> unit (** Return the packet duration in its stream's base_time unit.*) val get_duration : 'media t -> Int64.t option (** Set the packet duration in its stream's base_time unit.*) val set_duration : 'media t -> Int64.t option -> unit (** Return the packet byte position in stream. *) val get_position : 'media t -> Int64.t option (** Set the packet byte position in stream. *) val set_position : 'media t -> Int64.t option -> unit (** Return a fresh bytes array containing a copy of packet datas. *) val to_bytes : 'media t -> bytes (** [Avcodec.Packet.parse_data parser f data] applies function [f] to the parsed packets frome the [data] array according to the [parser] configuration. Raise Error if the parsing failed. *) val parse_data : 'media parser -> ('media t -> unit) -> data -> unit (** Same as {!Avcodec.Packet.parse_data} with bytes array. *) val parse_bytes : 'media parser -> ('media t -> unit) -> bytes -> int -> unit (** Advanced users: create a packet with the given data. *) val create : string -> 'media t (** Advanced users: return the packet's content. *) val content : 'media t -> string end (** Audio codecs. *) module Audio : sig (** Main types for audio codecs. *) type 'mode t = (audio, 'mode) codec (** Audio codec ids. Careful: different codecs share the same ID, e.g. aac and libfdk_aac *) type id = Codec_id.audio (** Get the codec descriptor. *) val descriptor : id -> descriptor option (** List of all audio codec IDs. *) val codec_ids : Codec_id.audio list (** List of all available audio encoders. *) val encoders : encode t list (** List of all available audio decoders. *) val decoders : decode t list (** Find an encoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_encoder_by_name : string -> encode t (** Find an encoder from its id. Raise Error if the codec is not found or is not an audio codec. *) val find_encoder : id -> encode t (** Find a decoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_decoder_by_name : string -> decode t (** Find a decoder from its id. Raise Error if the codec is not found or is not an audio codec. *) val find_decoder : id -> decode t (** Return the list of supported channel layouts of the codec. *) val get_supported_channel_layouts : _ t -> Avutil.Channel_layout.t list (** [Avcodec.Audio.find_best_channel_layout codec default] return the best channel layout of the [codec] codec or the [default] value if the codec has no channel layout. *) val find_best_channel_layout : _ t -> Avutil.Channel_layout.t -> Avutil.Channel_layout.t (** Return the list of supported sample formats of the codec. 
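Together with {!find_best_sample_format}, this is typically used to pick a sample format the encoder actually supports, e.g. (sketch, assuming [codec] is an audio encoder obtained from {!find_encoder_by_name}):
{[
let sample_format = Avcodec.Audio.find_best_sample_format codec `Dbl
]}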
*) val get_supported_sample_formats : _ t -> Avutil.Sample_format.t list (** [Avcodec.Audio.find_best_sample_format codec default] return the best sample format of the [codec] codec or the [default] value if the codec has no sample format. *) val find_best_sample_format : _ t -> Avutil.Sample_format.t -> Avutil.Sample_format.t (** Return the list of supported sample rates of the codec. *) val get_supported_sample_rates : _ t -> int list (** [Avcodec.Audio.find_best_sample_rate codec default] return the best sample rate of the [codec] codec or the [default] value if the codec has no sample rate. *) val find_best_sample_rate : _ t -> int -> int (** [Avcodec.Audio.create_parser codec] create an audio packet parser. Raise Error if the parser creation failed. *) val create_parser : ?params:audio params -> decode t -> audio Packet.parser (** [Avcodec.Audio.create_decoder ~params codec] create an audio decoder. Raise Error if the decoder creation failed. *) val create_decoder : ?params:audio params -> decode t -> audio decoder (** [Avcodec.Audio.sample_format decoder] returns the output sample format for the given decoder. *) val sample_format : audio decoder -> Sample_format.t (** [Avcodec.Audio.create_encoder] create an audio encoder. Params have the same semantics as in [Av.new_audio_stream] Raise Error if the encoder creation failed. *) val create_encoder : ?opts:opts -> ?channels:int -> ?channel_layout:Channel_layout.t -> sample_rate:int -> sample_format:Avutil.Sample_format.t -> time_base:Avutil.rational -> encode t -> audio encoder (** Get the desired frame_size for this encoder. *) val frame_size : audio encoder -> int (** Return the name of a codec. *) val get_name : _ codec -> string (** Return the description of a codec. *) val get_description : _ codec -> string (** Return the name of the codec ID. *) val string_of_id : id -> string (** Return the ID (class) of a codec. *) val get_id : _ t -> id (** Return the id of the codec params. *) val get_params_id : audio params -> id (** Return the channel layout set for the codec params. *) val get_channel_layout : audio params -> Avutil.Channel_layout.t (** Returns the number of channels set for the codec params. *) val get_nb_channels : audio params -> int (** Returns the sample format set for the codec params. *) val get_sample_format : audio params -> Avutil.Sample_format.t (** Returns the bit rate set for the codec params. *) val get_bit_rate : audio params -> int (** Returns the sample rate set for the codec. *) val get_sample_rate : audio params -> int end (** Video codecs. *) module Video : sig (** Main types for video codecs. *) type 'mode t = (video, 'mode) codec (** Video codec ids. Careful: different codecs share the same ID, e.g. aac and libfdk_aac *) type id = Codec_id.video (** Get the codec descriptor. *) val descriptor : id -> descriptor option (** List all video codec IDs. *) val codec_ids : Codec_id.video list (** List of all available video encoders. *) val encoders : encode t list (** List of all available video decoders. *) val decoders : decode t list (** Find an encoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_encoder_by_name : string -> encode t (** Find an encoder from its id. Raise Error if the codec is not found or is not an audio codec. *) val find_encoder : id -> encode t (** Find a decoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_decoder_by_name : string -> decode t (** Find a decoder from its id. 
Raise Error if the codec is not found or is not an audio codec. *) val find_decoder : id -> decode t (** Return the list of supported frame rates of the codec. *) val get_supported_frame_rates : _ t -> Avutil.rational list (** [Avcodec.Video.find_best_frame_rate codec default] return the best frame rate of the [codec] codec or the [default] value if the codec has no frame rate. *) val find_best_frame_rate : _ t -> Avutil.rational -> Avutil.rational (** Return the list of supported pixel formats of the codec. *) val get_supported_pixel_formats : _ t -> Avutil.Pixel_format.t list (** [Avcodec.Video.find_best_pixel_format codec default] return the best pixel format of the [codec] codec or the [default] value if the codec has no pixel format. *) val find_best_pixel_format : ?hwaccel:bool -> _ t -> Avutil.Pixel_format.t -> Avutil.Pixel_format.t (** [Avcodec.Video.create_parser codec] create an video packet parser. Raise Error if the parser creation failed. *) val create_parser : ?params:video params -> decode t -> video Packet.parser (** [Avcodec.Video.create_decoder codec] create a video decoder. Raise Error if the decoder creation failed. *) val create_decoder : ?params:video params -> decode t -> video decoder type hardware_context = [ `Device_context of HwContext.device_context | `Frame_context of HwContext.frame_context ] (** [Avcodec.Video.create_encoder] create a video encoder. Params have the same semantics as in [Av.new_video_stream] Raise Error if the encoder creation failed. *) val create_encoder : ?opts:opts -> ?frame_rate:Avutil.rational -> ?hardware_context:hardware_context -> pixel_format:Avutil.Pixel_format.t -> width:int -> height:int -> time_base:Avutil.rational -> encode t -> video encoder (** Return the name of a codec. *) val get_name : _ codec -> string (** Return the description of a codec. *) val get_description : _ codec -> string (** Return the name of the codec. *) val string_of_id : id -> string (** Return the ID (class) of a codec. *) val get_id : _ t -> id (** Return the id of the codec params. *) val get_params_id : video params -> id (** Returns the width set for the codec params. *) val get_width : video params -> int (** Returns the height set for the codec params. *) val get_height : video params -> int (** Returns the sample aspect ratio set for the codec params. *) val get_sample_aspect_ratio : video params -> Avutil.rational (** Returns the pixel format set for the codec params. *) val get_pixel_format : video params -> Avutil.Pixel_format.t option (** Returns the pixel aspect set for the codec params. *) val get_pixel_aspect : video params -> Avutil.rational option (** Returns the bit rate set for the codec. *) val get_bit_rate : video params -> int end (** Subtitle codecs. *) module Subtitle : sig (** Main subtitle types. *) type 'mode t = (subtitle, 'mode) codec (** Subtitle codec ids. Careful: different codecs share the same ID, e.g. aac and libfdk_aac *) type id = Codec_id.subtitle (** Get the codec descriptor. *) val descriptor : id -> descriptor option (** List all subtitle codec IDs. *) val codec_ids : Codec_id.subtitle list (** List of all available subtitle encoders. *) val encoders : encode t list (** List of all available subtitle decoders. *) val decoders : decode t list (** Find an encoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_encoder_by_name : string -> encode t (** Find an encoder from its id. Raise Error if the codec is not found or is not an audio codec. 
*) val find_encoder : id -> encode t (** Find a decoder from its name. Raise Error if the codec is not found or is not an audio codec. *) val find_decoder_by_name : string -> decode t (** Find a decoder from its id. Raise Error if the codec is not found or is not an audio codec. *) val find_decoder : id -> decode t (** Return the name of a codec. *) val get_name : _ codec -> string (** Return the description of a codec. *) val get_description : _ codec -> string (** Return the name of the codec. *) val string_of_id : id -> string (** Return the ID (class) of a codec. *) val get_id : _ t -> id (** Return the id of the codec params. *) val get_params_id : subtitle params -> id end module Unknown : sig type 'mode t = ([ `Data ], 'mode) codec (* Unkown codecs seem used for data mostly. *) type id = Codec_id.unknown (** List of all unknown codec IDs. *) val codec_ids : Codec_id.unknown list (** Return the name of the codec. *) val string_of_id : id -> string (** Return the id of the codec params. *) val get_params_id : [ `Data ] params -> id end (* This includes all the codec. *) type id = Codec_id.codec_id val string_of_id : id -> string module BitstreamFilter : sig type filter = private { name : string; codecs : id list; options : Avutil.Options.t; } type 'a t val filters : filter list (** Init a filter with optional options and input params. Returns initialized filter with output params. *) val init : ?opts:opts -> filter -> 'a params -> 'a t * 'a params val send_packet : 'a t -> 'a Packet.t -> unit val send_eof : 'a t -> unit val receive_packet : 'a t -> 'a Packet.t end (** [Avcodec.decode decoder f packet] applies function [f] to the decoded frames frome the [packet] according to the [decoder] configuration. Raise Error if the decoding failed. *) val decode : 'media decoder -> ('media frame -> unit) -> 'media Packet.t -> unit (** [Avcodec.flush_decoder decoder f] applies function [f] to the decoded frames frome the buffered packets in the [decoder]. Raise Error if the decoding failed. *) val flush_decoder : 'media decoder -> ('media frame -> unit) -> unit (** [Avcodec.encode encoder f frame] applies function [f] to the encoded packets from the [frame] according to the [encoder] configuration. Raise Error if the encoding failed. *) val encode : 'media encoder -> ('media Packet.t -> unit) -> 'media frame -> unit (** [Avcodec.flush_encoder encoder] applies function [f] to the encoded packets from the buffered frames in the [encoder]. Raise Error if the encoding failed. 
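A typical encoding loop therefore ends with a flush, e.g. (sketch; [frames] and the [output_packet] callback are hypothetical):
{[
List.iter (Avcodec.encode encoder output_packet) frames;
Avcodec.flush_encoder encoder output_packet
]}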
*) val flush_encoder : 'media encoder -> ('media Packet.t -> unit) -> unit ocaml-ffmpeg-1.1.11/avcodec/avcodec_stubs.c000066400000000000000000001406541457634536500205600ustar00rootroot00000000000000#define CAML_NAME_SPACE 1 #include #include #include #include #include #include #include #include #include "avcodec_stubs.h" #include "avutil_stubs.h" #include "codec_capabilities_stubs.h" #include "codec_id_stubs.h" #include "codec_properties_stubs.h" #include "hw_config_method_stubs.h" #include "media_types_stubs.h" #include #include #ifndef AV_PKT_FLAG_DISPOSABLE #define AV_PKT_FLAG_DISPOSABLE 0x0010 #endif value ocaml_avcodec_init(value unit) { #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 9, 100) avcodec_register_all(); #endif return Val_unit; } static value value_of_audio_codec_id(enum AVCodecID id) { value ret = Val_AudioCodecID(id); return ret; } static value value_of_video_codec_id(enum AVCodecID id) { value ret = Val_VideoCodecID(id); return ret; } static value value_of_subtitle_codec_id(enum AVCodecID id) { value ret = Val_SubtitleCodecID(id); return ret; } static value value_of_unknown_codec_id(enum AVCodecID id) { value ret = Val_UnknownCodecID(id); return ret; } CAMLprim value ocaml_avcodec_flag_qscale(value unit) { CAMLparam0(); CAMLreturn(Val_int(AV_CODEC_FLAG_QSCALE)); } CAMLprim value ocaml_avcodec_get_input_buffer_padding_size() { CAMLparam0(); CAMLreturn(Val_int(AV_INPUT_BUFFER_PADDING_SIZE)); } CAMLprim value ocaml_avcodec_subtitle_codec_id_to_AVCodecID(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn(Val_int(SubtitleCodecID_val(_codec_id))); } /***** AVCodecContext *****/ static AVCodecContext *create_AVCodecContext(AVCodecParameters *params, const AVCodec *codec) { AVCodecContext *codec_context; int ret = 0; codec_context = avcodec_alloc_context3(codec); if (!codec_context) { caml_raise_out_of_memory(); } if (params) ret = avcodec_parameters_to_context(codec_context, params); if (ret < 0) { avcodec_free_context(&codec_context); ocaml_avutil_raise_error(ret); } // Open the codec caml_release_runtime_system(); ret = avcodec_open2(codec_context, codec, NULL); caml_acquire_runtime_system(); if (ret < 0) { avcodec_free_context(&codec_context); ocaml_avutil_raise_error(ret); } return codec_context; } /***** AVCodecParameters *****/ static void finalize_codec_parameters(value v) { struct AVCodecParameters *codec_parameters = CodecParameters_val(v); avcodec_parameters_free(&codec_parameters); } static struct custom_operations codec_parameters_ops = { "ocaml_avcodec_parameters", finalize_codec_parameters, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; void value_of_codec_parameters_copy(AVCodecParameters *src, value *pvalue) { if (!src) Fail("Failed to get codec parameters"); AVCodecParameters *dst = avcodec_parameters_alloc(); if (!dst) caml_raise_out_of_memory(); int ret = avcodec_parameters_copy(dst, src); if (ret < 0) ocaml_avutil_raise_error(ret); *pvalue = caml_alloc_custom(&codec_parameters_ops, sizeof(AVCodecParameters *), 0, 1); CodecParameters_val(*pvalue) = dst; } /***** AVPacket *****/ static void finalize_packet(value v) { struct AVPacket *packet = Packet_val(v); av_packet_free(&packet); } static struct custom_operations packet_ops = { "ocaml_packet", finalize_packet, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; value value_of_ffmpeg_packet(AVPacket *packet) { value ret; if (!packet) Fail("Empty packet"); int size = 0; if (packet->buf) size = 
packet->buf->size; ret = caml_alloc_custom_mem(&packet_ops, sizeof(AVPacket *), size); Packet_val(ret) = packet; return ret; } CAMLprim value ocaml_avcodec_create_packet(value _data) { CAMLparam1(_data); AVPacket *packet; int len = caml_string_length(_data); packet = av_packet_alloc(); if (!packet) caml_raise_out_of_memory(); int err = av_new_packet(packet, len); if (err != 0) { av_freep(packet); ocaml_avutil_raise_error(err); } memcpy(packet->data, String_val(_data), len); CAMLreturn(value_of_ffmpeg_packet(packet)); } CAMLprim value ocaml_avcodec_packet_content(value _packet) { CAMLparam1(_packet); AVPacket *packet = Packet_val(_packet); CAMLreturn(caml_alloc_initialized_string(packet->size, (char *)packet->data)); } CAMLprim value ocaml_avcodec_packet_add_side_data(value _packet, value _side_data) { CAMLparam2(_packet, _side_data); AVPacket *packet = Packet_val(_packet); enum AVPacketSideDataType type; uint8_t *data; AVReplayGain *replay_gain; size_t len; switch (Field(_side_data, 0)) { case PVV_Metadata_update: type = AV_PKT_DATA_METADATA_UPDATE; break; case PVV_Strings_metadata: type = AV_PKT_DATA_STRINGS_METADATA; break; case PVV_Replaygain: type = AV_PKT_DATA_REPLAYGAIN; break; default: Fail("Invalid value"); } switch (type) { case AV_PKT_DATA_METADATA_UPDATE: case AV_PKT_DATA_STRINGS_METADATA: len = caml_string_length(Field(_side_data, 1)); data = av_malloc(len); if (!data) caml_raise_out_of_memory(); memcpy(data, String_val(Field(_side_data, 1)), len); av_packet_add_side_data(packet, type, data, len); break; case AV_PKT_DATA_REPLAYGAIN: len = sizeof(AVReplayGain); data = av_malloc(len); if (!data) caml_raise_out_of_memory(); replay_gain = (AVReplayGain *)data; replay_gain->track_gain = Int_val(Field(Field(_side_data, 1), 0)); replay_gain->track_peak = Int_val(Field(Field(_side_data, 1), 1)); replay_gain->album_gain = Int_val(Field(Field(_side_data, 1), 2)); replay_gain->album_peak = Int_val(Field(Field(_side_data, 1), 3)); av_packet_add_side_data(packet, type, data, len); break; default: Fail("Invalid value"); } CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_packet_side_data(value _packet) { CAMLparam1(_packet); CAMLlocal3(ret, tmp, tmp2); AVPacket *packet = Packet_val(_packet); int len = 0; int i; value type; AVReplayGain *replay_gain; for (i = 0; i < packet->side_data_elems; i++) { switch (packet->side_data[i].type) { case AV_PKT_DATA_METADATA_UPDATE: case AV_PKT_DATA_STRINGS_METADATA: case AV_PKT_DATA_REPLAYGAIN: len++; default: break; } } ret = caml_alloc_tuple(len); for (i = 0; i < len; i++) { switch (packet->side_data[i].type) { case AV_PKT_DATA_METADATA_UPDATE: case AV_PKT_DATA_STRINGS_METADATA: type = packet->side_data[i].type == AV_PKT_DATA_METADATA_UPDATE ? 
PVV_Metadata_update : PVV_Strings_metadata; tmp = caml_alloc_initialized_string(packet->side_data[i].size, (char *)packet->side_data[i].data); tmp2 = caml_alloc_tuple(2); Store_field(tmp2, 0, type); Store_field(tmp2, 1, tmp); Store_field(ret, i, tmp2); break; case AV_PKT_DATA_REPLAYGAIN: if (packet->side_data[i].size < sizeof(AVReplayGain)) Fail("Invalid side_data"); replay_gain = (AVReplayGain *)packet->side_data[i].data; tmp = caml_alloc_tuple(4); Store_field(tmp, 0, Val_int(replay_gain->track_gain)); Store_field(tmp, 1, Val_int(replay_gain->track_peak)); Store_field(tmp, 2, Val_int(replay_gain->album_gain)); Store_field(tmp, 3, Val_int(replay_gain->album_peak)); tmp2 = caml_alloc_tuple(2); Store_field(tmp2, 0, PVV_Replaygain); Store_field(tmp2, 1, tmp); Store_field(ret, i, tmp2); break; default: break; } } CAMLreturn(ret); }; CAMLprim value ocaml_avcodec_packet_dup(value _packet) { CAMLparam1(_packet); CAMLlocal1(ret); AVPacket *packet = av_packet_alloc(); if (!packet) caml_raise_out_of_memory(); av_packet_ref(packet, Packet_val(_packet)); ret = caml_alloc_custom(&packet_ops, sizeof(AVPacket *), 0, 1); Packet_val(ret) = packet; CAMLreturn(ret); } CAMLprim value ocaml_avcodec_get_flags(value _packet) { CAMLparam1(_packet); CAMLreturn(Val_int(Packet_val(_packet)->flags)); } CAMLprim value ocaml_avcodec_get_packet_stream_index(value _packet) { CAMLparam1(_packet); CAMLreturn(Val_int(Packet_val(_packet)->stream_index)); } CAMLprim value ocaml_avcodec_set_packet_stream_index(value _packet, value _index) { CAMLparam1(_packet); Packet_val(_packet)->stream_index = Int_val(_index); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_get_packet_pts(value _packet) { CAMLparam1(_packet); CAMLlocal1(ret); AVPacket *packet = Packet_val(_packet); if (packet->pts == AV_NOPTS_VALUE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(packet->pts)); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_set_packet_pts(value _packet, value _pts) { CAMLparam2(_packet, _pts); AVPacket *packet = Packet_val(_packet); if (_pts == Val_none) packet->pts = AV_NOPTS_VALUE; else packet->pts = Int64_val(Field(_pts, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_get_packet_duration(value _packet) { CAMLparam1(_packet); CAMLlocal1(ret); AVPacket *packet = Packet_val(_packet); if (packet->duration == 0) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(packet->duration)); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_set_packet_duration(value _packet, value _duration) { CAMLparam2(_packet, _duration); AVPacket *packet = Packet_val(_packet); if (_duration == Val_none) packet->duration = 0; else packet->duration = Int64_val(Field(_duration, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_get_packet_position(value _packet) { CAMLparam1(_packet); CAMLlocal1(ret); AVPacket *packet = Packet_val(_packet); if (packet->pos == -1) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(packet->pos)); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_set_packet_position(value _packet, value _position) { CAMLparam2(_packet, _position); AVPacket *packet = Packet_val(_packet); if (_position == Val_none) packet->pos = -1; else packet->pos = Int64_val(Field(_position, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_get_packet_dts(value _packet) { CAMLparam1(_packet); CAMLlocal1(ret); AVPacket *packet = Packet_val(_packet); if (packet->dts == AV_NOPTS_VALUE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); 
Store_field(ret, 0, caml_copy_int64(packet->dts)); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_set_packet_dts(value _packet, value _dts) { CAMLparam2(_packet, _dts); AVPacket *packet = Packet_val(_packet); if (_dts == Val_none) packet->dts = AV_NOPTS_VALUE; else packet->dts = Int64_val(Field(_dts, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_get_packet_size(value _packet) { CAMLparam1(_packet); CAMLreturn(Val_int(Packet_val(_packet)->size)); } CAMLprim value ocaml_avcodec_packet_to_bytes(value _packet) { CAMLparam1(_packet); CAMLlocal1(ans); struct AVPacket *packet = Packet_val(_packet); ans = caml_alloc_string(packet->size); memcpy((uint8_t *)String_val(ans), packet->data, packet->size); CAMLreturn(ans); } /***** AVCodecParserContext *****/ typedef struct { AVCodecParserContext *context; AVCodecContext *codec_context; } parser_t; #define Parser_val(v) (*(parser_t **)Data_custom_val(v)) static void free_parser(parser_t *parser) { if (!parser) return; if (parser->context) { caml_release_runtime_system(); av_parser_close(parser->context); caml_acquire_runtime_system(); } if (parser->codec_context) avcodec_free_context(&parser->codec_context); free(parser); } static void finalize_parser(value v) { free_parser(Parser_val(v)); } static struct custom_operations parser_ops = { "ocaml_avcodec_parser", finalize_parser, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; static parser_t *create_parser(AVCodecParameters *params, const AVCodec *codec) { parser_t *parser = (parser_t *)calloc(1, sizeof(parser_t)); if (!parser) caml_raise_out_of_memory(); caml_release_runtime_system(); parser->context = av_parser_init(codec->id); caml_acquire_runtime_system(); if (!parser->context) { free_parser(parser); caml_raise_out_of_memory(); } parser->codec_context = create_AVCodecContext(NULL, codec); return parser; } CAMLprim value ocaml_avcodec_create_parser(value _params, value _codec) { CAMLparam2(_params, _codec); CAMLlocal1(ans); const AVCodec *codec = AvCodec_val(_codec); AVCodecParameters *params = NULL; if (_params != Val_none) params = CodecParameters_val(Field(_params, 0)); parser_t *parser = create_parser(params, codec); ans = caml_alloc_custom(&parser_ops, sizeof(parser_t *), 0, 1); Parser_val(ans) = parser; CAMLreturn(ans); } CAMLprim value ocaml_avcodec_parse_packet(value _parser, value _data, value _ofs, value _len) { CAMLparam2(_parser, _data); CAMLlocal3(val_packet, tuple, ans); parser_t *parser = Parser_val(_parser); uint8_t *data = Caml_ba_data_val(_data) + Int_val(_ofs); size_t init_len = Int_val(_len); size_t len = init_len; int ret = 0; AVPacket *packet = av_packet_alloc(); if (!packet) caml_raise_out_of_memory(); do { ret = av_parser_parse2(parser->context, parser->codec_context, &packet->data, &packet->size, data, len, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0); data += ret; len -= ret; } while (packet->size == 0 && ret > 0); if (ret < 0) { av_packet_free(&packet); ocaml_avutil_raise_error(ret); } if (packet->size) { val_packet = value_of_ffmpeg_packet(packet); tuple = caml_alloc_tuple(2); Store_field(tuple, 0, val_packet); Store_field(tuple, 1, Val_int(init_len - len)); ans = caml_alloc(1, 0); Store_field(ans, 0, tuple); } else { av_packet_free(&packet); ans = Val_int(0); } CAMLreturn(ans); } /***** codec_context_t *****/ typedef struct { const AVCodec *codec; AVCodecContext *codec_context; // output int flushed; } codec_context_t; #define CodecContext_val(v) (*(codec_context_t **)Data_custom_val(v)) static void 
finalize_codec_context(value v) { codec_context_t *ctx = CodecContext_val(v); if (ctx->codec_context) avcodec_free_context(&ctx->codec_context); free(ctx); } static struct custom_operations codec_context_ops = { "ocaml_codec_context", finalize_codec_context, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; CAMLprim value ocaml_avcodec_create_decoder(value _params, value _codec) { CAMLparam2(_params, _codec); CAMLlocal1(ans); const AVCodec *codec = AvCodec_val(_codec); AVCodecParameters *params = NULL; if (_params != Val_none) params = CodecParameters_val(Field(_params, 0)); codec_context_t *ctx = (codec_context_t *)calloc(1, sizeof(codec_context_t)); if (!ctx) caml_raise_out_of_memory(); ans = caml_alloc_custom(&codec_context_ops, sizeof(codec_context_t *), 0, 1); CodecContext_val(ans) = ctx; ctx->codec = codec; ctx->codec_context = create_AVCodecContext(params, ctx->codec); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_sample_format(value _ctx) { CAMLparam1(_ctx); codec_context_t *ctx = CodecContext_val(_ctx); CAMLreturn(Val_SampleFormat(ctx->codec_context->sample_fmt)); } CAMLprim value ocaml_avcodec_encoder_params(value _encoder) { CAMLparam1(_encoder); CAMLlocal1(ans); AVCodecParameters *params = avcodec_parameters_alloc(); if (!params) caml_raise_out_of_memory(); codec_context_t *ctx = CodecContext_val(_encoder); int err = avcodec_parameters_from_context(params, ctx->codec_context); if (err < 0) { avcodec_parameters_free(¶ms); ocaml_avutil_raise_error(err); } value_of_codec_parameters_copy(params, &ans); avcodec_parameters_free(¶ms); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_encoder_time_base(value _encoder) { CAMLparam1(_encoder); CAMLlocal1(ans); codec_context_t *ctx = CodecContext_val(_encoder); value_of_rational(&ctx->codec_context->time_base, &ans); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_create_audio_encoder(value _sample_fmt, value _codec, value _channels, value _opts) { CAMLparam2(_opts, _codec); CAMLlocal3(ret, ans, unused); const AVCodec *codec = AvCodec_val(_codec); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
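// (With flags = 0, av_dict_set duplicates both key and value, so the OCaml
    // bytes passed below only need to stay valid for the duration of the call.)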
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } codec_context_t *ctx = (codec_context_t *)calloc(1, sizeof(codec_context_t)); if (!ctx) caml_raise_out_of_memory(); ans = caml_alloc_custom(&codec_context_ops, sizeof(codec_context_t *), 0, 1); CodecContext_val(ans) = ctx; ctx->codec = codec; ctx->codec_context = avcodec_alloc_context3(codec); if (!ctx->codec_context) { caml_raise_out_of_memory(); } ctx->codec_context->sample_fmt = Int_val(_sample_fmt); ctx->codec_context->channels = Int_val(_channels); // Detect new API #ifdef AV_CHANNEL_LAYOUT_MONO av_channel_layout_default(&ctx->codec_context->ch_layout, Int_val(_channels)); #endif // Open the codec caml_release_runtime_system(); err = avcodec_open2(ctx->codec_context, ctx->codec, &options); caml_acquire_runtime_system(); if (err < 0) ocaml_avutil_raise_error(err); // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_create_video_encoder(value _device_context, value _frame_context, value _pix_fmt, value _codec, value _opts) { CAMLparam3(_device_context, _frame_context, _codec); CAMLlocal3(ret, ans, unused); const AVCodec *codec = AvCodec_val(_codec); AVBufferRef *device_ctx = NULL; AVBufferRef *frame_ctx = NULL; if (_device_context != Val_none) device_ctx = BufferRef_val(Some_val(_device_context)); if (_frame_context != Val_none) frame_ctx = BufferRef_val(Some_val(_frame_context)); AVDictionary *options = NULL; char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } codec_context_t *ctx = (codec_context_t *)calloc(1, sizeof(codec_context_t)); if (!ctx) { av_dict_free(&options); caml_raise_out_of_memory(); } ans = caml_alloc_custom(&codec_context_ops, sizeof(codec_context_t *), 0, 1); CodecContext_val(ans) = ctx; ctx->codec = codec; ctx->codec_context = avcodec_alloc_context3(codec); if (!ctx->codec_context) { av_dict_free(&options); caml_raise_out_of_memory(); } ctx->codec_context->pix_fmt = Int_val(_pix_fmt); if (device_ctx) { ctx->codec_context->hw_device_ctx = av_buffer_ref(device_ctx); if (!ctx->codec_context->hw_device_ctx) { av_dict_free(&options); caml_raise_out_of_memory(); } } if (frame_ctx) { ctx->codec_context->hw_frames_ctx = av_buffer_ref(frame_ctx); if (!ctx->codec_context->hw_frames_ctx) { av_dict_free(&options); caml_raise_out_of_memory(); } } // Open the codec caml_release_runtime_system(); err = avcodec_open2(ctx->codec_context, ctx->codec, &options); caml_acquire_runtime_system(); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_frame_size(value _ctx) { CAMLparam1(_ctx); codec_context_t *ctx = CodecContext_val(_ctx); CAMLreturn(Val_int(ctx->codec_context->frame_size)); } CAMLprim value ocaml_avcodec_send_packet(value _ctx, value _packet) { CAMLparam2(_ctx, _packet); codec_context_t *ctx = CodecContext_val(_ctx); AVPacket *packet = _packet ? 
Packet_val(_packet) : NULL; // send the packet with the compressed data to the decoder caml_release_runtime_system(); int ret = avcodec_send_packet(ctx->codec_context, packet); caml_acquire_runtime_system(); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_receive_frame(value _ctx) { CAMLparam1(_ctx); CAMLlocal2(val_frame, ans); codec_context_t *ctx = CodecContext_val(_ctx); int ret = 0; AVFrame *frame = av_frame_alloc(); if (!frame) { caml_raise_out_of_memory(); } if (ctx->codec_context->hw_frames_ctx && frame) { AVFrame *hw_frame = av_frame_alloc(); if (!hw_frame) { caml_raise_out_of_memory(); } caml_release_runtime_system(); ret = av_hwframe_get_buffer(ctx->codec_context->hw_frames_ctx, hw_frame, 0); caml_acquire_runtime_system(); if (ret < 0) { av_frame_free(&hw_frame); ocaml_avutil_raise_error(ret); } if (!hw_frame->hw_frames_ctx) { caml_raise_out_of_memory(); } caml_release_runtime_system(); ret = av_hwframe_transfer_data(hw_frame, frame, 0); caml_acquire_runtime_system(); if (ret < 0) { av_frame_free(&hw_frame); ocaml_avutil_raise_error(ret); } frame = hw_frame; } caml_release_runtime_system(); ret = avcodec_receive_frame(ctx->codec_context, frame); caml_acquire_runtime_system(); if (ret < 0 && ret != AVERROR(EAGAIN)) { av_frame_free(&frame); ocaml_avutil_raise_error(ret); } if (ret == AVERROR(EAGAIN)) { ans = Val_int(0); } else { ans = caml_alloc(1, 0); val_frame = value_of_frame(frame); Store_field(ans, 0, val_frame); } CAMLreturn(ans); } CAMLprim value ocaml_avcodec_flush_decoder(value _ctx) { ocaml_avcodec_send_packet(_ctx, 0); return Val_unit; } static void send_frame(codec_context_t *ctx, AVFrame *frame) { int ret; AVFrame *hw_frame = NULL; ctx->flushed = !frame; if (ctx->codec_context->hw_frames_ctx && frame) { hw_frame = av_frame_alloc(); if (!hw_frame) { caml_raise_out_of_memory(); } ret = av_hwframe_get_buffer(ctx->codec_context->hw_frames_ctx, hw_frame, 0); if (ret < 0) { av_frame_free(&hw_frame); ocaml_avutil_raise_error(ret); } if (!hw_frame->hw_frames_ctx) { caml_raise_out_of_memory(); } ret = av_hwframe_transfer_data(hw_frame, frame, 0); if (ret < 0) { av_frame_free(&hw_frame); ocaml_avutil_raise_error(ret); } frame = hw_frame; } caml_release_runtime_system(); ret = avcodec_send_frame(ctx->codec_context, frame); caml_acquire_runtime_system(); if (hw_frame) av_frame_free(&hw_frame); if (ret < 0) ocaml_avutil_raise_error(ret); } CAMLprim value ocaml_avcodec_send_frame(value _ctx, value _frame) { CAMLparam2(_ctx, _frame); CAMLlocal1(val_packet); codec_context_t *ctx = CodecContext_val(_ctx); AVFrame *frame = _frame ? 
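/* The packet/frame stubs around here expose libavcodec's send/receive model to
   OCaml. A minimal sketch of the C-level decoding loop they wrap (error
   handling elided):

     avcodec_send_packet(dec_ctx, pkt);              // pkt may be NULL to flush
     while (avcodec_receive_frame(dec_ctx, frame) >= 0) {
       // ... use frame ...
       av_frame_unref(frame);
     }

   ocaml_avcodec_receive_frame maps AVERROR(EAGAIN) to Val_int(0), i.e. None on
   the OCaml side, so callers keep sending packets until a frame is ready. */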
Frame_val(_frame) : NULL; send_frame(ctx, frame); CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_receive_packet(value _ctx) { CAMLparam1(_ctx); CAMLlocal2(val_packet, ans); codec_context_t *ctx = CodecContext_val(_ctx); int ret = 0; AVPacket *packet = av_packet_alloc(); if (!packet) caml_raise_out_of_memory(); caml_release_runtime_system(); ret = avcodec_receive_packet(ctx->codec_context, packet); caml_acquire_runtime_system(); if (ret < 0) { av_packet_free(&packet); if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) ans = Val_int(0); else ocaml_avutil_raise_error(ret); } else { ans = caml_alloc(1, 0); val_packet = value_of_ffmpeg_packet(packet); Store_field(ans, 0, val_packet); } CAMLreturn(ans); } CAMLprim value ocaml_avcodec_flush_encoder(value _ctx) { ocaml_avcodec_send_frame(_ctx, 0); return Val_unit; } /**** codec ****/ static const AVCodec *find_encoder_by_name(const char *name, enum AVMediaType type) { const AVCodec *codec = avcodec_find_encoder_by_name(name); if (!codec || codec->type != type) ocaml_avutil_raise_error(AVERROR_ENCODER_NOT_FOUND); return codec; } static const AVCodec *find_encoder(enum AVCodecID id, enum AVMediaType type) { const AVCodec *codec = avcodec_find_encoder(id); if (!codec || codec->type != type) ocaml_avutil_raise_error(AVERROR_ENCODER_NOT_FOUND); return codec; } static const AVCodec *find_decoder_by_name(const char *name, enum AVMediaType type) { const AVCodec *codec = avcodec_find_decoder_by_name(name); if (!codec || codec->type != type) ocaml_avutil_raise_error(AVERROR_DECODER_NOT_FOUND); return codec; } static const AVCodec *find_decoder(enum AVCodecID id, enum AVMediaType type) { const AVCodec *codec = avcodec_find_decoder(id); if (!codec || codec->type != type) ocaml_avutil_raise_error(AVERROR_DECODER_NOT_FOUND); return codec; } CAMLprim value ocaml_avcodec_parameters_get_bit_rate(value _cp) { CAMLparam1(_cp); CAMLreturn(Val_int(CodecParameters_val(_cp)->bit_rate)); } /**** Audio codec ID ****/ CAMLprim value ocaml_avcodec_get_audio_codec_id(value _codec) { CAMLparam1(_codec); const AVCodec *codec = AvCodec_val(_codec); CAMLreturn(value_of_audio_codec_id(codec->id)); } CAMLprim value ocaml_avcodec_get_video_codec_id(value _codec) { CAMLparam1(_codec); const AVCodec *codec = AvCodec_val(_codec); CAMLreturn(value_of_video_codec_id(codec->id)); } CAMLprim value ocaml_avcodec_get_subtitle_codec_id(value _codec) { CAMLparam1(_codec); const AVCodec *codec = AvCodec_val(_codec); CAMLreturn(value_of_subtitle_codec_id(codec->id)); } CAMLprim value ocaml_avcodec_get_audio_codec_id_name(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn(caml_copy_string( avcodec_get_name((enum AVCodecID)AudioCodecID_val(_codec_id)))); } CAMLprim value ocaml_avcodec_get_codec_id_name(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn(caml_copy_string( avcodec_get_name((enum AVCodecID)CodecID_val(_codec_id)))); } CAMLprim value ocaml_avcodec_find_audio_encoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder_by_name(String_val(_name), AVMEDIA_TYPE_AUDIO))); } CAMLprim value ocaml_avcodec_find_audio_encoder(value _id) { CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder(AudioCodecID_val(_id), AVMEDIA_TYPE_AUDIO))); } CAMLprim value ocaml_avcodec_find_audio_decoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder_by_name(String_val(_name), AVMEDIA_TYPE_AUDIO))); } CAMLprim value ocaml_avcodec_find_audio_decoder(value _id) { 
CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder(AudioCodecID_val(_id), AVMEDIA_TYPE_AUDIO))); } CAMLprim value ocaml_avcodec_name(value _codec) { CAMLparam1(_codec); CAMLreturn(caml_copy_string(AvCodec_val(_codec)->name)); } CAMLprim value ocaml_avcodec_capabilities(value _codec) { CAMLparam1(_codec); CAMLlocal1(ret); const AVCodec *codec = AvCodec_val(_codec); int i, len; len = 0; for (i = 0; i < AV_CODEC_CAP_T_TAB_LEN; i++) if (codec->capabilities & AV_CODEC_CAP_T_TAB[i][1]) len++; ret = caml_alloc_tuple(len); len = 0; for (i = 0; i < AV_CODEC_CAP_T_TAB_LEN; i++) if (codec->capabilities & AV_CODEC_CAP_T_TAB[i][1]) Store_field(ret, len++, Val_int(AV_CODEC_CAP_T_TAB[i][0])); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_descriptor(enum AVCodecID id) { CAMLparam0(); CAMLlocal3(ret, tmp, _profile); const AVCodecDescriptor *descriptor = avcodec_descriptor_get(id); int i, len; char **p; struct AVProfile *profile; if (!descriptor) CAMLreturn(Val_none); ret = caml_alloc_tuple(6); Store_field(ret, 0, Val_MediaTypes(descriptor->type)); Store_field(ret, 1, caml_copy_string(descriptor->name)); if (descriptor->long_name) { tmp = caml_alloc_tuple(1); Store_field(tmp, 0, caml_copy_string(descriptor->long_name)); Store_field(ret, 2, tmp); } else Store_field(ret, 2, Val_none); len = 0; for (i = 0; i < AV_CODEC_PROP_T_TAB_LEN; i++) { if (descriptor->props & AV_CODEC_PROP_T_TAB[i][1]) len++; } tmp = caml_alloc_tuple(len); len = 0; for (i = 0; i < AV_CODEC_PROP_T_TAB_LEN; i++) { if (descriptor->props & AV_CODEC_PROP_T_TAB[i][1]) { Store_field(tmp, len, AV_CODEC_PROP_T_TAB[i][0]); len++; } } Store_field(ret, 3, tmp); len = 0; p = (char **)descriptor->mime_types; while (p && *p) { len++; p++; } tmp = caml_alloc_tuple(len); len = 0; p = (char **)descriptor->mime_types; while (p && *p) { Store_field(tmp, len, caml_copy_string(*p)); len++; p++; } Store_field(ret, 4, tmp); len = 0; profile = (struct AVProfile *)descriptor->profiles; while (profile && profile->profile != FF_PROFILE_UNKNOWN) { len++; profile++; } tmp = caml_alloc_tuple(len); len = 0; profile = (struct AVProfile *)descriptor->profiles; while (profile && profile->profile != FF_PROFILE_UNKNOWN) { _profile = caml_alloc_tuple(2); Store_field(_profile, 0, Val_int(profile->profile)); Store_field(_profile, 1, caml_copy_string(profile->name)); Store_field(tmp, len, _profile); len++; profile++; } Store_field(ret, 5, tmp); tmp = caml_alloc_tuple(1); Store_field(tmp, 0, ret); CAMLreturn(tmp); } CAMLprim value ocaml_avcodec_params_descriptor(value _params) { CAMLparam1(_params); CAMLreturn(ocaml_avcodec_descriptor(CodecParameters_val(_params)->codec_id)); } value ocaml_avcodec_audio_descriptor(value _codec_id) { return ocaml_avcodec_descriptor(AudioCodecID_val(_codec_id)); } value ocaml_avcodec_video_descriptor(value _codec_id) { return ocaml_avcodec_descriptor(VideoCodecID_val(_codec_id)); } value ocaml_avcodec_subtitle_descriptor(value _codec_id) { return ocaml_avcodec_descriptor(SubtitleCodecID_val(_codec_id)); } CAMLprim value ocaml_avcodec_hw_methods(value _codec) { CAMLparam1(_codec); CAMLlocal5(ret, tmp1, cons1, tmp2, cons2); const AVCodec *codec = AvCodec_val(_codec); int n, i = 0; const AVCodecHWConfig *config = avcodec_get_hw_config(codec, i); if (!config) CAMLreturn(Val_int(0)); cons1 = Val_int(0); do { ret = caml_alloc(2, 0); Store_field(ret, 1, cons1); tmp1 = caml_alloc_tuple(3); Store_field(tmp1, 0, Val_PixelFormat(config->pix_fmt)); tmp2 = Val_int(0); cons2 = Val_int(0); for (n = 0; n < 
AV_CODEC_HW_CONFIG_METHOD_T_TAB_LEN; n++) { if (config->methods & AV_CODEC_HW_CONFIG_METHOD_T_TAB[n][1]) { tmp2 = caml_alloc(2, 0); Store_field(tmp2, 0, AV_CODEC_HW_CONFIG_METHOD_T_TAB[n][0]); Store_field(tmp2, 1, cons2); cons2 = tmp2; } } Store_field(tmp1, 1, tmp2); Store_field(tmp1, 2, Val_HwDeviceType(config->device_type)); Store_field(ret, 0, tmp1); cons1 = ret; i++; config = avcodec_get_hw_config(codec, i); } while (config); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_get_supported_channel_layouts(value _codec) { CAMLparam1(_codec); CAMLlocal2(list, cons); int i; List_init(list); const AVCodec *codec = AvCodec_val(_codec); if (codec->channel_layouts) { for (i = 0; codec->channel_layouts[i] != 0; i++) List_add(list, cons, Val_ChannelLayout(codec->channel_layouts[i])); } CAMLreturn(list); } CAMLprim value ocaml_avcodec_get_supported_sample_formats(value _codec) { CAMLparam1(_codec); CAMLlocal2(list, cons); int i; List_init(list); const AVCodec *codec = AvCodec_val(_codec); if (codec->sample_fmts) { for (i = 0; codec->sample_fmts[i] != -1; i++) List_add(list, cons, Val_SampleFormat(codec->sample_fmts[i])); } CAMLreturn(list); } CAMLprim value ocaml_avcodec_get_supported_sample_rates(value _codec) { CAMLparam1(_codec); CAMLlocal2(list, cons); int i; List_init(list); const AVCodec *codec = AvCodec_val(_codec); if (codec->supported_samplerates) { for (i = 0; codec->supported_samplerates[i] != 0; i++) List_add(list, cons, Val_int(codec->supported_samplerates[i])); } CAMLreturn(list); } /**** Audio codec parameters ****/ CAMLprim value ocaml_avcodec_parameters_get_audio_codec_id(value _cp) { CAMLparam1(_cp); CAMLreturn(value_of_audio_codec_id(CodecParameters_val(_cp)->codec_id)); } CAMLprim value ocaml_avcodec_parameters_get_channel_layout(value _cp) { CAMLparam1(_cp); AVCodecParameters *cp = CodecParameters_val(_cp); if (cp->channel_layout == 0) { cp->channel_layout = av_get_default_channel_layout(cp->channels); } CAMLreturn(Val_ChannelLayout(cp->channel_layout)); } CAMLprim value ocaml_avcodec_parameters_get_nb_channels(value _cp) { CAMLparam1(_cp); CAMLreturn(Val_int(CodecParameters_val(_cp)->channels)); } CAMLprim value ocaml_avcodec_parameters_get_sample_format(value _cp) { CAMLparam1(_cp); CAMLreturn( Val_SampleFormat((enum AVSampleFormat)CodecParameters_val(_cp)->format)); } CAMLprim value ocaml_avcodec_parameters_get_sample_rate(value _cp) { CAMLparam1(_cp); CAMLreturn(Val_int(CodecParameters_val(_cp)->sample_rate)); } CAMLprim value ocaml_avcodec_parameters_audio_copy(value _codec_id, value _channel_layout, value _sample_format, value _sample_rate, value _cp) { CAMLparam4(_codec_id, _channel_layout, _sample_format, _cp); CAMLlocal1(ans); value_of_codec_parameters_copy(CodecParameters_val(_cp), &ans); AVCodecParameters *dst = CodecParameters_val(ans); dst->codec_id = AudioCodecID_val(_codec_id); dst->channel_layout = ChannelLayout_val(_channel_layout); dst->channels = av_get_channel_layout_nb_channels(dst->channel_layout); dst->format = SampleFormat_val(_sample_format); dst->sample_rate = Int_val(_sample_rate); CAMLreturn(ans); } /**** Video codec ID ****/ CAMLprim value ocaml_avcodec_get_video_codec_id_name(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn(caml_copy_string(avcodec_get_name(VideoCodecID_val(_codec_id)))); } CAMLprim value ocaml_avcodec_find_video_decoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder_by_name(String_val(_name), AVMEDIA_TYPE_VIDEO))); } CAMLprim value 
ocaml_avcodec_find_video_decoder(value _id) { CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder(VideoCodecID_val(_id), AVMEDIA_TYPE_VIDEO))); } CAMLprim value ocaml_avcodec_find_video_encoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder_by_name(String_val(_name), AVMEDIA_TYPE_VIDEO))); } CAMLprim value ocaml_avcodec_find_video_encoder(value _id) { CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder(VideoCodecID_val(_id), AVMEDIA_TYPE_VIDEO))); } CAMLprim value ocaml_avcodec_get_supported_frame_rates(value _codec) { CAMLparam1(_codec); CAMLlocal3(list, cons, val); int i; List_init(list); const AVCodec *codec = AvCodec_val(_codec); if (codec->supported_framerates) { for (i = 0; codec->supported_framerates[i].num != 0; i++) { value_of_rational(&codec->supported_framerates[i], &val); List_add(list, cons, val); } } CAMLreturn(list); } CAMLprim value ocaml_avcodec_get_supported_pixel_formats(value _codec) { CAMLparam1(_codec); CAMLlocal2(list, cons); int i; List_init(list); const AVCodec *codec = AvCodec_val(_codec); if (codec->pix_fmts) { for (i = 0; codec->pix_fmts[i] != -1; i++) List_add(list, cons, Val_PixelFormat(codec->pix_fmts[i])); } CAMLreturn(list); } /**** Video codec parameters ****/ CAMLprim value ocaml_avcodec_parameters_get_video_codec_id(value _cp) { CAMLparam1(_cp); CAMLreturn(value_of_video_codec_id(CodecParameters_val(_cp)->codec_id)); } CAMLprim value ocaml_avcodec_parameters_get_width(value _cp) { CAMLparam1(_cp); CAMLreturn(Val_int(CodecParameters_val(_cp)->width)); } CAMLprim value ocaml_avcodec_parameters_get_height(value _cp) { CAMLparam1(_cp); CAMLreturn(Val_int(CodecParameters_val(_cp)->height)); } CAMLprim value ocaml_avcodec_parameters_get_sample_aspect_ratio(value _cp) { CAMLparam1(_cp); CAMLlocal1(ans); value_of_rational(&CodecParameters_val(_cp)->sample_aspect_ratio, &ans); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_parameters_get_pixel_format(value _cp) { CAMLparam1(_cp); CAMLlocal1(ret); enum AVPixelFormat f = CodecParameters_val(_cp)->format; if (f == AV_PIX_FMT_NONE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, Val_PixelFormat(f)); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_parameters_get_pixel_aspect(value _cp) { CAMLparam1(_cp); CAMLlocal2(ret, ans); const AVRational pixel_aspect = CodecParameters_val(_cp)->sample_aspect_ratio; if (pixel_aspect.num == 0) CAMLreturn(Val_none); value_of_rational(&pixel_aspect, &ans); ret = caml_alloc_tuple(1); Store_field(ret, 0, ans); CAMLreturn(ret); } CAMLprim value ocaml_avcodec_parameters_video_copy(value _codec_id, value _width, value _height, value _sample_aspect_ratio, value _pixel_format, value _bit_rate, value _cp) { CAMLparam4(_codec_id, _sample_aspect_ratio, _pixel_format, _cp); CAMLlocal1(ans); value_of_codec_parameters_copy(CodecParameters_val(_cp), &ans); AVCodecParameters *dst = CodecParameters_val(ans); dst->codec_id = VideoCodecID_val(_codec_id); dst->width = Int_val(_width); dst->height = Int_val(_height); dst->sample_aspect_ratio.num = Int_val(Field(_sample_aspect_ratio, 0)); dst->sample_aspect_ratio.den = Int_val(Field(_sample_aspect_ratio, 1)); dst->format = PixelFormat_val(_pixel_format); dst->bit_rate = Int_val(_bit_rate); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_parameters_video_copy_byte(value *argv, int argn) { return ocaml_avcodec_parameters_video_copy(argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]); } /**** Unknown codec ID 
*****/ CAMLprim value ocaml_avcodec_get_unknown_codec_id_name(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn(caml_copy_string(avcodec_get_name(UnknownCodecID_val(_codec_id)))); } CAMLprim value ocaml_avcodec_parameters_get_unknown_codec_id(value _cp) { CAMLparam1(_cp); CAMLreturn(value_of_unknown_codec_id(CodecParameters_val(_cp)->codec_id)); } /**** Subtitle codec ID ****/ CAMLprim value ocaml_avcodec_get_subtitle_codec_id_name(value _codec_id) { CAMLparam1(_codec_id); CAMLreturn( caml_copy_string(avcodec_get_name(SubtitleCodecID_val(_codec_id)))); } CAMLprim value ocaml_avcodec_find_subtitle_decoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder_by_name(String_val(_name), AVMEDIA_TYPE_SUBTITLE))); } CAMLprim value ocaml_avcodec_find_subtitle_decoder(value _id) { CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_decoder(SubtitleCodecID_val(_id), AVMEDIA_TYPE_SUBTITLE))); } CAMLprim value ocaml_avcodec_find_subtitle_encoder_by_name(value _name) { CAMLparam1(_name); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder_by_name(String_val(_name), AVMEDIA_TYPE_SUBTITLE))); } CAMLprim value ocaml_avcodec_find_subtitle_encoder(value _id) { CAMLparam1(_id); CAMLlocal1(ret); CAMLreturn(value_of_avcodec( ret, find_encoder(SubtitleCodecID_val(_id), AVMEDIA_TYPE_SUBTITLE))); } /**** Subtitle codec parameters ****/ CAMLprim value ocaml_avcodec_parameters_get_subtitle_codec_id(value _cp) { CAMLparam1(_cp); CAMLreturn(value_of_subtitle_codec_id(CodecParameters_val(_cp)->codec_id)); } CAMLprim value ocaml_avcodec_parameters_subtitle_copy(value _codec_id, value _cp) { CAMLparam2(_codec_id, _cp); CAMLlocal1(ans); value_of_codec_parameters_copy(CodecParameters_val(_cp), &ans); AVCodecParameters *dst = CodecParameters_val(ans); dst->codec_id = SubtitleCodecID_val(_codec_id); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_int_of_flag(value _flag) { CAMLparam1(_flag); switch (_flag) { case PVV_Keyframe: CAMLreturn(Val_int(AV_PKT_FLAG_KEY)); case PVV_Corrupt: CAMLreturn(Val_int(AV_PKT_FLAG_CORRUPT)); case PVV_Discard: CAMLreturn(Val_int(AV_PKT_FLAG_DISCARD)); case PVV_Trusted: CAMLreturn(Val_int(AV_PKT_FLAG_TRUSTED)); case PVV_Disposable: CAMLreturn(Val_int(AV_PKT_FLAG_DISPOSABLE)); default: caml_failwith("Invalid flag type!"); } } CAMLprim value ocaml_avcodec_get_next_codec(value h) { CAMLparam0(); CAMLlocal5(_tmp, _id, _h, _ans, _ret); void *s; const AVCodec *codec; enum AVCodecID id = VALUE_NOT_FOUND; int i; if (h == Val_int(0)) { s = NULL; } else { s = AvObj_val(Field(h, 0)); } codec = av_codec_iterate(&s); if (!codec) { CAMLreturn(Val_int(0)); } for (i = 0; i < AV_CODEC_ID_AUDIO_TAB_LEN; i++) { if (codec->id == AV_CODEC_ID_AUDIO_TAB[i][1]) id = AV_CODEC_ID_AUDIO_TAB[i][0]; } for (i = 0; i < AV_CODEC_ID_VIDEO_TAB_LEN; i++) { if (codec->id == AV_CODEC_ID_VIDEO_TAB[i][1]) id = AV_CODEC_ID_VIDEO_TAB[i][0]; } for (i = 0; i < AV_CODEC_ID_SUBTITLE_TAB_LEN; i++) { if (codec->id == AV_CODEC_ID_SUBTITLE_TAB[i][1]) id = AV_CODEC_ID_SUBTITLE_TAB[i][0]; } if (id == VALUE_NOT_FOUND) _id = Val_int(0); else { _id = caml_alloc_tuple(1); Store_field(_id, 0, id); } _h = caml_alloc_tuple(1); Store_field(_h, 0, value_of_avobj(_tmp, s)); _ans = caml_alloc_tuple(4); Store_field(_ans, 0, value_of_avcodec(_tmp, codec)); Store_field(_ans, 1, _id); Store_field(_ans, 2, Val_bool(av_codec_is_encoder(codec))); Store_field(_ans, 3, _h); _ret = caml_alloc_tuple(1); Store_field(_ret, 0, _ans); CAMLreturn(_ret); } CAMLprim value 
ocaml_avcodec_get_name(value codec) { CAMLparam0(); CAMLreturn(caml_copy_string(AvCodec_val(codec)->name)); } CAMLprim value ocaml_avcodec_get_description(value _codec) { CAMLparam1(_codec); const AVCodec *codec = AvCodec_val(_codec); if (!codec->long_name) CAMLreturn(caml_copy_string("")); CAMLreturn(caml_copy_string(codec->long_name)); } #define BsfCursor_val(v) (*(void **)Data_abstract_val(v)) CAMLprim value ocaml_avcodec_bsf_next(value _cursor) { CAMLparam1(_cursor); CAMLlocal2(ans, tmp); int len; enum AVCodecID *codec_id; void *cursor = NULL; if (_cursor != Val_none) cursor = BsfCursor_val(Field(_cursor, 0)); const AVBitStreamFilter *filter = av_bsf_iterate(&cursor); if (!filter) CAMLreturn(Val_none); ans = caml_alloc_tuple(4); Store_field(ans, 0, caml_copy_string(filter->name)); len = 0; codec_id = (enum AVCodecID *)filter->codec_ids; while (codec_id && *codec_id != AV_CODEC_ID_NONE) { codec_id++; len++; } tmp = caml_alloc_tuple(len); len = 0; codec_id = (enum AVCodecID *)filter->codec_ids; while (codec_id && *codec_id != AV_CODEC_ID_NONE) { Store_field(tmp, len, Val_CodecID(*codec_id)); codec_id++; len++; } Store_field(ans, 1, tmp); Store_field(ans, 2, value_of_avclass(tmp, filter->priv_class)); tmp = caml_alloc(1, Abstract_tag); BsfCursor_val(tmp) = cursor; Store_field(ans, 3, tmp); tmp = caml_alloc_tuple(1); Store_field(tmp, 0, ans); CAMLreturn(tmp); } #define BsfFilter_val(v) (*(AVBSFContext **)Data_custom_val(v)) static void finalize_bsf_filter(value v) { AVBSFContext *filter = BsfFilter_val(v); av_bsf_free(&filter); } static struct custom_operations bsf_filter_ops = { "bsf_filter_parameters", finalize_bsf_filter, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; CAMLprim value ocaml_avcodec_bsf_init(value _opts, value _name, value _params) { CAMLparam3(_opts, _name, _params); CAMLlocal3(tmp, ans, unused); AVCodecParameters *params = CodecParameters_val(_params); AVBSFContext *bsf; const AVBitStreamFilter *filter; AVDictionary *options = NULL; int ret; filter = av_bsf_get_by_name(String_val(_name)); if (!filter) { caml_raise_not_found(); } char *key, *val; int len = Wosize_val(_opts); int i, err, count; for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
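/* ocaml_avcodec_bsf_init follows the usual AVBSFContext setup sequence.
   Roughly, with error handling elided (the filter name is only an example):

     const AVBitStreamFilter *f = av_bsf_get_by_name("h264_mp4toannexb");
     AVBSFContext *bsf;
     av_bsf_alloc(f, &bsf);
     avcodec_parameters_copy(bsf->par_in, in_params);
     av_opt_set_dict(bsf, &options);
     av_bsf_init(bsf);

   Packets are then pushed with av_bsf_send_packet() and pulled with
   av_bsf_receive_packet(); sending NULL signals EOF (see
   ocaml_avcodec_bsf_send_eof below). */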
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } ret = av_bsf_alloc(filter, &bsf); if (ret < 0) { ocaml_avutil_raise_error(ret); } ret = avcodec_parameters_copy(bsf->par_in, params); if (ret < 0) { av_bsf_free(&bsf); ocaml_avutil_raise_error(ret); } ret = av_opt_set_dict(bsf, &options); if (ret < 0) { av_bsf_free(&bsf); ocaml_avutil_raise_error(ret); } caml_release_runtime_system(); ret = av_bsf_init(bsf); caml_acquire_runtime_system(); if (ret < 0) { av_bsf_free(&bsf); ocaml_avutil_raise_error(ret); } // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); tmp = caml_alloc_custom(&bsf_filter_ops, sizeof(AVBSFContext *), 0, 1); BsfFilter_val(tmp) = bsf; ans = caml_alloc_tuple(3); Store_field(ans, 0, tmp); value_of_codec_parameters_copy(bsf->par_out, &tmp); Store_field(ans, 1, tmp); Store_field(ans, 2, unused); CAMLreturn(ans); } CAMLprim value ocaml_avcodec_bsf_send_packet(value _filter, value _packet) { CAMLparam2(_filter, _packet); int ret; AVPacket *packet = Packet_val(_packet); AVBSFContext *bsf = BsfFilter_val(_filter); caml_release_runtime_system(); ret = av_bsf_send_packet(bsf, packet); caml_acquire_runtime_system(); if (ret < 0) { ocaml_avutil_raise_error(ret); } CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_bsf_send_eof(value _filter) { CAMLparam1(_filter); int ret; AVBSFContext *bsf = BsfFilter_val(_filter); caml_release_runtime_system(); ret = av_bsf_send_packet(bsf, NULL); caml_acquire_runtime_system(); if (ret < 0) { ocaml_avutil_raise_error(ret); } CAMLreturn(Val_unit); } CAMLprim value ocaml_avcodec_bsf_receive_packet(value _filter) { CAMLparam1(_filter); int ret; AVPacket *packet; packet = av_packet_alloc(); if (!packet) { caml_raise_out_of_memory(); } caml_release_runtime_system(); ret = av_bsf_receive_packet(BsfFilter_val(_filter), packet); caml_acquire_runtime_system(); if (ret < 0) { av_packet_free(&packet); ocaml_avutil_raise_error(ret); } CAMLreturn(value_of_ffmpeg_packet(packet)); } ocaml-ffmpeg-1.1.11/avcodec/avcodec_stubs.h000066400000000000000000000023331457634536500205540ustar00rootroot00000000000000#ifndef _AVCODEC_STUBS_H_ #define _AVCODEC_STUBS_H_ #include #include /***** AVCodec *****/ #define AvCodec_val(v) (*(const AVCodec **)Data_abstract_val(v)) static inline value value_of_avcodec(value ret, const AVCodec *avcodec) { ret = caml_alloc(1, Abstract_tag); AvCodec_val(ret) = avcodec; return ret; } /***** Codec parameters *****/ #define CodecParameters_val(v) \ (*(struct AVCodecParameters **)Data_custom_val(v)) void value_of_codec_parameters_copy(AVCodecParameters *src, value *pvalue); /***** Packet *****/ #define Packet_val(v) (*(struct AVPacket **)Data_custom_val(v)) value value_of_ffmpeg_packet(AVPacket *packet); /**** Audio codec ID ****/ enum AVCodecID AudioCodecID_val(value v); value Val_AudioCodecID(enum AVCodecID id); /**** Video codec ID ****/ enum AVCodecID VideoCodecID_val(value v); value Val_VideoCodecID(enum AVCodecID id); /**** Subtitle codec ID ****/ enum AVCodecID SubtitleCodecID_val(value v); value Val_SubtitleCodecID(enum AVCodecID id); /**** Unknown codec ID ****/ enum AVCodecID UnknownCodecID_val(value v); value 
Val_UnknownCodecID(enum AVCodecID id); #endif // _AVCODEC_STUBS_H_ ocaml-ffmpeg-1.1.11/avcodec/config/000077500000000000000000000000001457634536500170235ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avcodec/config/discover.ml000066400000000000000000000013041457634536500211710ustar00rootroot00000000000000module C = Configurator.V1 let () = C.main ~name:"ffmpeg-avcodec-pkg-config" (fun c -> let default : C.Pkg_config.package_conf = { libs = ["-lavcodec"]; cflags = [] } in let conf = match C.Pkg_config.get c with | None -> default | Some pc -> ( match C.Pkg_config.query_expr_err pc ~package:"libavcodec" ~expr:"libavcodec >= 58.87.100" with | Error msg -> failwith msg | Ok deps -> deps) in C.Flags.write_sexp "c_flags.sexp" conf.cflags; C.Flags.write_lines "c_flags" conf.cflags; C.Flags.write_sexp "c_library_flags.sexp" conf.libs) ocaml-ffmpeg-1.1.11/avcodec/config/dune000066400000000000000000000000751457634536500177030ustar00rootroot00000000000000(executable (name discover) (libraries dune.configurator)) ocaml-ffmpeg-1.1.11/avcodec/dune000066400000000000000000000036761457634536500164500ustar00rootroot00000000000000(library (name avcodec) (public_name ffmpeg-avcodec) (synopsis "Bindings to ffmpeg's avcodec library") (foreign_stubs (language c) (names avcodec_stubs) (flags (:include c_flags.sexp))) (c_library_flags (:include c_library_flags.sexp)) (install_c_headers avcodec_stubs) (libraries ffmpeg-avutil)) (rule (targets c_flags c_flags.sexp c_library_flags.sexp) (action (run ./config/discover.exe))) (rule (targets avcodec_stubs.c) (mode fallback) (deps hw_config_method_stubs.h codec_capabilities_stubs.h codec_properties_stubs.h codec_id_stubs.h) (action (echo "this should not happen"))) (rule (targets hw_config_method_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" hw_config_method h %{read-lines:c_flags}))) (rule (targets hw_config_method.ml) (deps hw_config_method_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" hw_config_method ml %{read-lines:c_flags}))) (rule (targets codec_capabilities_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_capabilities h %{read-lines:c_flags}))) (rule (targets codec_capabilities.ml) (deps codec_capabilities_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_capabilities ml %{read-lines:c_flags}))) (rule (targets codec_properties_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_properties h %{read-lines:c_flags}))) (rule (targets codec_properties.ml) (deps codec_properties_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_properties ml %{read-lines:c_flags}))) (rule (targets codec_id_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_id h %{read-lines:c_flags}))) (rule (targets codec_id.ml) (deps codec_id_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" codec_id ml %{read-lines:c_flags}))) ocaml-ffmpeg-1.1.11/avdevice/000077500000000000000000000000001457634536500157405ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avdevice/avdevice.ml000066400000000000000000000113671457634536500200700ustar00rootroot00000000000000open Avutil external init : unit -> unit = "ocaml_avdevice_init" [@@noalloc] let init_done = ref false let () = if not !init_done then init (); init_done := true let hd = function [] -> raise Not_found | x :: _ -> x external get_audio_input_formats : unit -> (input, audio) format array = "ocaml_avdevice_get_audio_input_formats" let get_audio_input_formats () = Array.to_list 
(get_audio_input_formats ()) let get_default_audio_input_format () = hd (get_audio_input_formats ()) external get_video_input_formats : unit -> (input, video) format array = "ocaml_avdevice_get_video_input_formats" let get_video_input_formats () = Array.to_list (get_video_input_formats ()) let get_default_video_input_format () = hd (get_video_input_formats ()) external get_audio_output_formats : unit -> (output, audio) format array = "ocaml_avdevice_get_audio_output_formats" let get_audio_output_formats () = Array.to_list (get_audio_output_formats ()) let get_default_audio_output_format () = hd (get_audio_output_formats ()) external get_video_output_formats : unit -> (output, video) format array = "ocaml_avdevice_get_video_output_formats" let get_video_output_formats () = Array.to_list (get_video_output_formats ()) let get_default_video_output_format () = hd (get_video_output_formats ()) let find_input name fmts = try List.find (fun d -> Av.Format.get_input_name d = name) fmts with Not_found -> raise (Error (`Failure ("Input device not found : " ^ name))) let find_audio_input name = find_input name (get_audio_input_formats ()) let find_video_input name = find_input name (get_video_input_formats ()) let find_output name fmts = try List.find (fun d -> Av.Format.get_output_name d = name) fmts with Not_found -> raise (Error (`Failure ("Output device not found : " ^ name))) let find_audio_output name = find_output name (get_audio_output_formats ()) let find_video_output name = find_output name (get_video_output_formats ()) let open_audio_input name = Av.open_input ~format:(find_audio_input name) "" let open_default_audio_input () = Av.open_input ~format:(get_default_audio_input_format ()) "" let open_video_input name = Av.open_input ~format:(find_video_input name) "" let open_default_video_input () = Av.open_input ~format:(get_default_video_input_format ()) "" external open_output_format : (output, _) format -> bool -> (string * string) array -> output container * string array = "ocaml_av_open_output_format" let _opt_val = function | `String s -> s | `Int i -> string_of_int i | `Int64 i -> Int64.to_string i | `Float f -> string_of_float f let mk_opts = function | None -> [||] | Some opts -> Array.of_list (Hashtbl.fold (fun opt_name opt_val cur -> (opt_name, _opt_val opt_val) :: cur) opts []) let filter_opts unused = function | None -> () | Some opts -> Hashtbl.filter_map_inplace (fun k v -> if Array.mem k unused then Some v else None) opts let open_audio_output ?(interleaved = true) ?opts name = let ret, unused = open_output_format (find_audio_output name) interleaved (mk_opts opts) in filter_opts unused opts; ret let open_default_audio_output ?(interleaved = true) ?opts () = let ret, unused = open_output_format (get_default_audio_output_format ()) interleaved (mk_opts opts) in filter_opts unused opts; ret let open_video_output ?(interleaved = true) ?opts name = let ret, unused = open_output_format (find_video_output name) interleaved (mk_opts opts) in filter_opts unused opts; ret let open_default_video_output ?(interleaved = true) ?opts () = let ret, unused = open_output_format (get_default_video_output_format ()) interleaved (mk_opts opts) in filter_opts unused opts; ret module App_to_dev = struct type message = | None | Window_size of int * int * int * int | Window_repaint of int * int * int * int | Pause | Play | Toggle_pause | Set_volume of float | Mute | Unmute | Toggle_mute | Get_volume | Get_mute external control_message : message -> _ container -> unit = 
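(* Usage sketch: enumerate the audio input formats, open the default audio
   input and ask it to pause. This is only illustrative; it assumes the
   default device actually supports the pause control message.

     let () =
       List.iter
         (fun f -> print_endline (Av.Format.get_input_name f))
         (Avdevice.get_audio_input_formats ());
       let input = Avdevice.open_default_audio_input () in
       Avdevice.App_to_dev.control_messages [Avdevice.App_to_dev.Pause] input
*)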
"ocaml_avdevice_app_to_dev_control_message" let control_messages messages av = List.iter (fun msg -> control_message msg av) messages end module Dev_to_app = struct type message = | None | Create_window_buffer of (int * int * int * int) option | Prepare_window_buffer | Display_window_buffer | Destroy_window_buffer | Buffer_overflow | Buffer_underflow | Buffer_readable of Int64.t option | Buffer_writable of Int64.t option | Mute_state_changed of bool | Volume_level_changed of float external set_control_message_callback : (message -> unit) -> _ container -> unit = "ocaml_avdevice_set_control_message_callback" end ocaml-ffmpeg-1.1.11/avdevice/avdevice.mli000066400000000000000000000077461457634536500202470ustar00rootroot00000000000000(** Tits module contains input and output devices for grabbing from and rendering to many common multimedia input/output software frameworks. *) open Avutil (** Initialize the module. This is done implicitely if you use any of the module's API but is here to provide an easy way to make sure that the module is explicitely linked by the OCaml compiler. NOT thread-safe! *) val init : unit -> unit (** Return the audio input devices formats. *) val get_audio_input_formats : unit -> (input, audio) format list (** Return the default audio input device format. *) val get_default_audio_input_format : unit -> (input, audio) format (** Return the video input devices formats. *) val get_video_input_formats : unit -> (input, video) format list (** Return the default video input device format. *) val get_default_video_input_format : unit -> (input, video) format (** Return the audio output devices formats. *) val get_audio_output_formats : unit -> (output, audio) format list (** Return the default audio output device format. *) val get_default_audio_output_format : unit -> (output, audio) format (** Return the video output devices formats. *) val get_video_output_formats : unit -> (output, video) format list (** Return the default video output device format. *) val get_default_video_output_format : unit -> (output, video) format (** Open the audio input device from its name. Raise Error if the device is not found. *) val open_audio_input : string -> input container (** Open the default audio input device from its name. Raise Error if the device is not found. *) val open_default_audio_input : unit -> input container (** Open the video input device from its name. Raise Error if the device is not found. *) val open_video_input : string -> input container (** Open the default video input device from its name. Raise Error if the device is not found. *) val open_default_video_input : unit -> input container (** Open the audio output device from its name. Raise Error if the device is not found. *) val open_audio_output : ?interleaved:bool -> ?opts:opts -> string -> output container (** Open the default audio output device from its name. Raise Error if the device is not found. *) val open_default_audio_output : ?interleaved:bool -> ?opts:opts -> unit -> output container (** Open the video output device from its name. Raise Error if the device is not found. *) val open_video_output : ?interleaved:bool -> ?opts:opts -> string -> output container (** Open the default video output device from its name. Raise Error if the device is not found. 
*) val open_default_video_output : ?interleaved:bool -> ?opts:opts -> unit -> output container (** Application to device communication *) module App_to_dev : sig (** Application to device control messages *) type message = | None | Window_size of int * int * int * int | Window_repaint of int * int * int * int | Pause | Play | Toggle_pause | Set_volume of float | Mute | Unmute | Toggle_mute | Get_volume | Get_mute (** [Avdevice.App_to_dev.control_messages msg_list device] send the [msg_list] list of control message to the [device]. Raise Error if the application to device control message failed. *) val control_messages : message list -> _ container -> unit end (** Device to application communication *) module Dev_to_app : sig (** Device to application control messages *) type message = | None | Create_window_buffer of (int * int * int * int) option | Prepare_window_buffer | Display_window_buffer | Destroy_window_buffer | Buffer_overflow | Buffer_underflow | Buffer_readable of Int64.t option | Buffer_writable of Int64.t option | Mute_state_changed of bool | Volume_level_changed of float (** [Avdevice.Dev_to_app.set_control_message_callback callback device] set the [callback] for [device] message reception. *) val set_control_message_callback : (message -> unit) -> _ container -> unit end ocaml-ffmpeg-1.1.11/avdevice/avdevice_stubs.c000066400000000000000000000162151457634536500211170ustar00rootroot00000000000000#define CAML_NAME_SPACE 1 #include #include #include #include #include #include #include #include #include "av_stubs.h" #include "avutil_stubs.h" CAMLprim value ocaml_avdevice_init(value unit) { CAMLparam0(); avdevice_register_all(); CAMLreturn(Val_unit); } static value get_input_devices(avioformat_const AVInputFormat *( *input_device_next)(avioformat_const AVInputFormat *)) { CAMLparam0(); CAMLlocal2(v, ans); avioformat_const AVInputFormat *fmt = NULL; int len = 0; while ((fmt = input_device_next(fmt))) len++; ans = caml_alloc_tuple(len); int i = 0; fmt = NULL; while ((fmt = input_device_next(fmt))) { value_of_inputFormat(fmt, &v); Store_field(ans, i, v); i++; } CAMLreturn(ans); } CAMLprim value ocaml_avdevice_get_audio_input_formats(value unit) { CAMLparam0(); CAMLreturn(get_input_devices(av_input_audio_device_next)); } CAMLprim value ocaml_avdevice_get_video_input_formats(value unit) { CAMLparam0(); CAMLreturn(get_input_devices(av_input_video_device_next)); } static value get_output_devices(avioformat_const AVOutputFormat *( *output_device_next)(avioformat_const AVOutputFormat *)) { CAMLparam0(); CAMLlocal1(ans); avioformat_const AVOutputFormat *fmt = NULL; int len = 0; while ((fmt = output_device_next(fmt))) len++; ans = caml_alloc_tuple(len); int i = 0; fmt = NULL; while ((fmt = output_device_next(fmt))) { Store_field(ans, i, value_of_outputFormat(fmt)); i++; } CAMLreturn(ans); } CAMLprim value ocaml_avdevice_get_audio_output_formats(value unit) { CAMLparam0(); CAMLreturn(get_output_devices(av_output_audio_device_next)); } CAMLprim value ocaml_avdevice_get_video_output_formats(value unit) { CAMLparam0(); CAMLreturn(get_output_devices(av_output_video_device_next)); } static const enum AVAppToDevMessageType APP_TO_DEV_MESSAGE_TYPES[] = { AV_APP_TO_DEV_NONE, AV_APP_TO_DEV_PAUSE, AV_APP_TO_DEV_PLAY, AV_APP_TO_DEV_TOGGLE_PAUSE, AV_APP_TO_DEV_MUTE, AV_APP_TO_DEV_UNMUTE, AV_APP_TO_DEV_TOGGLE_MUTE, AV_APP_TO_DEV_GET_VOLUME, AV_APP_TO_DEV_GET_MUTE}; static const enum AVAppToDevMessageType APP_TO_DEV_MESSAGE_WITH_DATA_TYPES[] = { AV_APP_TO_DEV_WINDOW_SIZE, AV_APP_TO_DEV_WINDOW_REPAINT, 
AV_APP_TO_DEV_SET_VOLUME}; CAMLprim value ocaml_avdevice_app_to_dev_control_message(value _message, value _av) { CAMLparam2(_message, _av); enum AVAppToDevMessageType message_type; void *data = NULL; size_t data_size = 0; double dbl; AVDeviceRect rect; if (Is_block(_message)) { message_type = APP_TO_DEV_MESSAGE_WITH_DATA_TYPES[Tag_val(_message)]; if (message_type == AV_APP_TO_DEV_SET_VOLUME) { dbl = Double_val(Field(_message, 0)); data = &dbl; data_size = sizeof(dbl); } else { rect.x = Int_val(Field(_message, 0)); rect.y = Int_val(Field(_message, 1)); rect.width = Int_val(Field(_message, 2)); rect.height = Int_val(Field(_message, 3)); if (message_type == AV_APP_TO_DEV_WINDOW_SIZE || rect.width > 0) { data = ▭ data_size = sizeof(rect); } } } else { message_type = APP_TO_DEV_MESSAGE_TYPES[Int_val(_message)]; } caml_release_runtime_system(); AVFormatContext *format_context = ocaml_av_get_format_context(&_av); int ret = avdevice_app_to_dev_control_message(format_context, message_type, data, data_size); caml_acquire_runtime_system(); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } #define NONE_TAG 0 #define CREATE_WINDOW_BUFFER_TAG 0 #define PREPARE_WINDOW_BUFFER_TAG 1 #define DISPLAY_WINDOW_BUFFER_TAG 2 #define DESTROY_WINDOW_BUFFER_TAG 3 #define BUFFER_OVERFLOW_TAG 4 #define BUFFER_UNDERFLOW_TAG 5 #define BUFFER_READABLE_TAG 1 #define BUFFER_WRITABLE_TAG 2 #define MUTE_STATE_CHANGED_TAG 3 #define VOLUME_LEVEL_CHANGED_TAG 4 static int ocaml_control_message_callback(struct AVFormatContext *ctx, int type, void *data, size_t data_size) { CAMLparam0(); CAMLlocal3(msg, opt, res); enum AVDevToAppMessageType message_type = (enum AVDevToAppMessageType)type; int ret = 0; if (message_type == AV_DEV_TO_APP_NONE) { msg = Val_int(NONE_TAG); } else if (message_type == AV_DEV_TO_APP_CREATE_WINDOW_BUFFER) { if (data) { AVDeviceRect *rect = (AVDeviceRect *)data; opt = caml_alloc_tuple(4); Store_field(opt, 0, Val_int(rect->x)); Store_field(opt, 1, Val_int(rect->y)); Store_field(opt, 2, Val_int(rect->width)); Store_field(opt, 3, Val_int(rect->height)); } else { opt = Val_int(0); } msg = caml_alloc(1, CREATE_WINDOW_BUFFER_TAG); Store_field(msg, 0, opt); } else if (message_type == AV_DEV_TO_APP_PREPARE_WINDOW_BUFFER) { msg = Val_int(PREPARE_WINDOW_BUFFER_TAG); } else if (message_type == AV_DEV_TO_APP_DISPLAY_WINDOW_BUFFER) { msg = Val_int(DISPLAY_WINDOW_BUFFER_TAG); } else if (message_type == AV_DEV_TO_APP_DESTROY_WINDOW_BUFFER) { msg = Val_int(DESTROY_WINDOW_BUFFER_TAG); } else if (message_type == AV_DEV_TO_APP_BUFFER_OVERFLOW) { msg = Val_int(BUFFER_OVERFLOW_TAG); } else if (message_type == AV_DEV_TO_APP_BUFFER_UNDERFLOW) { msg = Val_int(BUFFER_UNDERFLOW_TAG); } else if (message_type == AV_DEV_TO_APP_BUFFER_READABLE || message_type == AV_DEV_TO_APP_BUFFER_WRITABLE) { if (data) { opt = caml_alloc_tuple(1); Store_field(opt, 0, caml_copy_int64(*((int64_t *)data))); } else { opt = Val_int(0); } msg = caml_alloc(1, message_type == AV_DEV_TO_APP_BUFFER_READABLE ? BUFFER_READABLE_TAG : BUFFER_WRITABLE_TAG); Store_field(msg, 0, opt); } else if (message_type == AV_DEV_TO_APP_MUTE_STATE_CHANGED) { msg = caml_alloc(1, MUTE_STATE_CHANGED_TAG); Store_field(msg, 0, (*((int *)data)) ? 
Val_true : Val_false); } else if (message_type == AV_DEV_TO_APP_VOLUME_LEVEL_CHANGED) { msg = caml_alloc(1, VOLUME_LEVEL_CHANGED_TAG); Store_field(msg, 0, caml_copy_double(*((double *)data))); } res = caml_callback_exn(*ocaml_av_get_control_message_callback(ctx), msg); if (Is_exception_result(res)) { res = Extract_exception(res); ret = AVERROR_UNKNOWN; } CAMLreturn(ret); } static int c_control_message_callback(struct AVFormatContext *ctx, int type, void *data, size_t data_size) { ocaml_ffmpeg_register_thread(); caml_acquire_runtime_system(); int ret = ocaml_control_message_callback(ctx, type, data, data_size); caml_release_runtime_system(); return ret; } CAMLprim value ocaml_avdevice_set_control_message_callback( value _control_message_callback, value _av) { CAMLparam2(_control_message_callback, _av); caml_release_runtime_system(); ocaml_av_set_control_message_callback(&_av, c_control_message_callback, &_control_message_callback); caml_acquire_runtime_system(); CAMLreturn(Val_unit); } ocaml-ffmpeg-1.1.11/avdevice/config/000077500000000000000000000000001457634536500172055ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avdevice/config/discover.ml000066400000000000000000000017711457634536500213630ustar00rootroot00000000000000module C = Configurator.V1 let os_type = ref "" let () = C.main ~args:[("--os_type", String (fun s -> os_type := s), "")] ~name:"ffmpeg-avdevice-pkg-config" (fun c -> let default : C.Pkg_config.package_conf = { libs = ["-lavdevice"]; cflags = [] } in let conf = match C.Pkg_config.get c with | None -> default | Some pc -> ( match C.Pkg_config.query_expr_err pc ~package:"libavdevice" ~expr:"libavdevice >= 57.10.100" with | Error msg -> failwith msg | Ok deps -> deps) in let libs = if !os_type = "Win32" then List.filter (fun flag -> String.length flag < 3 || (String.sub flag 0 3 <> "-Wl" && flag <> "-static-libgcc")) conf.libs else conf.libs in C.Flags.write_sexp "c_flags.sexp" conf.cflags; C.Flags.write_sexp "c_library_flags.sexp" libs) ocaml-ffmpeg-1.1.11/avdevice/config/dune000066400000000000000000000000751457634536500200650ustar00rootroot00000000000000(executable (name discover) (libraries dune.configurator)) ocaml-ffmpeg-1.1.11/avdevice/dune000066400000000000000000000006121457634536500166150ustar00rootroot00000000000000(library (name avdevice) (public_name ffmpeg-avdevice) (synopsis "Bindings to ffmpeg's avdevice library") (foreign_stubs (language c) (names avdevice_stubs) (flags (:include c_flags.sexp))) (c_library_flags (:include c_library_flags.sexp)) (libraries ffmpeg-av)) (rule (targets c_flags.sexp c_library_flags.sexp) (action (run ./config/discover.exe --os_type %{os_type}))) ocaml-ffmpeg-1.1.11/avfilter/000077500000000000000000000000001457634536500157665ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avfilter/avfilter.ml000066400000000000000000000374431457634536500201470ustar00rootroot00000000000000(** This module provides an API to AVfilter. 
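    A rough usage sketch follows. Filter names, option values and pad positions
    are illustrative: [find "volume"] raises [Not_found] if that filter is not
    compiled in, and the [`Stereo] and [`Dbl] variants stand for whatever
    [Avutil.Channel_layout.t] and [Avutil.Sample_format.t] values the stream
    actually uses.

    {[
      let graph = Avfilter.init () in
      let args =
        [
          `Pair ("sample_rate", `Int 44100);
          `Pair ("time_base", `Rational { Avutil.num = 1; den = 44100 });
          `Pair ("channel_layout", `Int64 (Avutil.Channel_layout.get_id `Stereo));
          `Pair ("sample_fmt", `Int (Avutil.Sample_format.get_id `Dbl));
        ]
      in
      let src = Avfilter.attach ~args ~name:"src" Avfilter.abuffer graph in
      let vol =
        Avfilter.attach
          ~args:[`Pair ("volume", `Float 0.5)]
          ~name:"vol" (Avfilter.find "volume") graph
      in
      let sink = Avfilter.attach ~name:"out" Avfilter.abuffersink graph in
      Avfilter.link
        (List.hd src.io.outputs.audio)
        (List.hd vol.io.inputs.audio);
      Avfilter.link
        (List.hd vol.io.outputs.audio)
        (List.hd sink.io.inputs.audio);
      let t = Avfilter.launch graph in
      let _, write_frame = List.hd t.inputs.audio in
      let _, output = List.hd t.outputs.audio in
      let process frame =
        write_frame (`Frame frame);
        try Some (output.handler ()) with Avutil.Error `Eagain -> None
      in
      process
    ]}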
*) type valued_arg = [ `String of string | `Int of int | `Int64 of int64 | `Float of float | `Rational of Avutil.rational ] type args = [ `Flag of string | `Pair of string * valued_arg ] type ('a, 'b) av = { audio : 'a; video : 'b } type ('a, 'b) io = { inputs : 'a; outputs : 'b } type _config type filter_ctx type ('a, 'b, 'c) pad = { pad_name : string; filter_name : string; media_type : 'b; idx : int; filter_ctx : filter_ctx option; _config : _config option; } type ('a, 'b) pads = (('a, [ `Audio ], 'b) pad list, ('a, [ `Video ], 'b) pad list) av type flag = [ `Dynamic_inputs | `Dynamic_outputs | `Slice_threads | `Support_timeline_generic | `Support_timeline_internal ] type 'a filter = { name : string; description : string; options : Avutil.Options.t; flags : flag list; io : (('a, [ `Input ]) pads, ('a, [ `Output ]) pads) io; } type 'a input = [ `Frame of 'a Avutil.frame | `Flush ] -> unit type 'a context = filter_ctx type 'a output = { context : 'a context; handler : unit -> 'a Avutil.frame } type 'a entries = (string * 'a) list type inputs = ([ `Audio ] input entries, [ `Video ] input entries) av type outputs = ([ `Audio ] output entries, [ `Video ] output entries) av type t = (inputs, outputs) io type config = { c : _config; mutable names : string list; mutable video_inputs : filter_ctx entries; mutable audio_inputs : filter_ctx entries; mutable video_outputs : filter_ctx entries; mutable audio_outputs : filter_ctx entries; } external time_base : filter_ctx -> Avutil.rational = "ocaml_avfilter_buffersink_get_time_base" external frame_rate : filter_ctx -> Avutil.rational = "ocaml_avfilter_buffersink_get_frame_rate" external width : filter_ctx -> int = "ocaml_avfilter_buffersink_get_w" external height : filter_ctx -> int = "ocaml_avfilter_buffersink_get_h" external pixel_aspect : filter_ctx -> Avutil.rational option = "ocaml_avfilter_buffersink_get_pixel_aspect" external pixel_format : filter_ctx -> Avutil.Pixel_format.t = "ocaml_avfilter_buffersink_get_pixel_format" external channels : filter_ctx -> int = "ocaml_avfilter_buffersink_get_channels" external channel_layout : filter_ctx -> Avutil.Channel_layout.t = "ocaml_avfilter_buffersink_get_channel_layout" external sample_rate : filter_ctx -> int = "ocaml_avfilter_buffersink_get_sample_rate" external sample_format : filter_ctx -> Avutil.Sample_format.t = "ocaml_avfilter_buffersink_get_sample_format" external set_frame_size : filter_ctx -> int -> unit = "ocaml_avfilter_buffersink_set_frame_size" exception Exists type ('a, 'b, 'c) _filter = { _name : string; _description : string; _inputs : ('a, 'b, 'c) pad array; _outputs : ('a, 'b, 'c) pad array; _options : Avutil.Options.t; _flags : int; } external register_all : unit -> unit = "ocaml_avfilter_register_all" let () = register_all () external get_all_filters : unit -> ([ `Unattached ], 'a, 'c) _filter array = "ocaml_avfilter_get_all_filters" external int_of_flag : flag -> int = "ocaml_avfilter_int_of_flag" let split_pads pads = let audio, video = Array.fold_left (fun (a, v) pad -> if pad.media_type = `Audio then ( let pad : (_, [ `Audio ], _) pad = { pad with media_type = `Audio } in (pad :: a, v)) else ( let pad : (_, [ `Video ], _) pad = { pad with media_type = `Video } in (a, pad :: v))) ([], []) pads in let audio = List.sort (fun pad1 pad2 -> compare pad1.idx pad2.idx) audio in let video = List.sort (fun pad1 pad2 -> compare pad1.idx pad2.idx) video in { audio; video } let filters, abuffer, buffer, abuffersink, buffersink = let filters, abuffer, buffer, abuffersink, buffersink = 
Array.fold_left (fun (filters, abuffer, buffer, abuffersink, buffersink) { _name; _description; _options; _inputs; _outputs; _flags } -> let io = { inputs = split_pads _inputs; outputs = split_pads _outputs } in let flags = List.filter (fun flag -> int_of_flag flag land _flags <> 0) [ `Dynamic_inputs; `Dynamic_outputs; `Slice_threads; `Support_timeline_generic; `Support_timeline_internal; ] in let filter = { name = _name; description = _description; options = _options; io; flags; } in match _name with | s when s = "abuffer" -> (filters, Some filter, buffer, abuffersink, buffersink) | s when s = "buffer" -> (filters, abuffer, Some filter, abuffersink, buffersink) | s when s = "abuffersink" -> (filters, abuffer, buffer, Some filter, buffersink) | s when s = "buffersink" -> (filters, abuffer, buffer, abuffersink, Some filter) | _ -> (filter :: filters, abuffer, buffer, abuffersink, buffersink)) ([], None, None, None, None) (get_all_filters ()) in let sort = List.sort (fun f1 f2 -> compare f1.name f2.name) in let get_some = function | Some f -> f | None -> failwith "ffmpeg API error: missing buffer or sink!" in ( sort filters, get_some abuffer, get_some buffer, get_some abuffersink, get_some buffersink ) let find name = List.find (fun f -> f.name = name) filters let find_opt name = List.find_opt (fun f -> f.name = name) filters let pad_name { pad_name; _ } = pad_name external init : unit -> _config = "ocaml_avfilter_init" let init () = { c = init (); names = []; audio_inputs = []; video_inputs = []; audio_outputs = []; video_outputs = []; } external create_filter : ?args:string -> name:string -> string -> _config -> filter_ctx * ('a, 'b, 'c) pad array * ('a, 'b, 'c) pad array = "ocaml_avfilter_create_filter" let rec args_of_args cur = function | [] -> cur | `Flag s :: args -> args_of_args (s :: cur) args | `Pair (lbl, `String s) :: args -> args_of_args (Printf.sprintf "%s=%s" lbl s :: cur) args | `Pair (lbl, `Int i) :: args -> args_of_args (Printf.sprintf "%s=%i" lbl i :: cur) args | `Pair (lbl, `Int64 i) :: args -> args_of_args (Printf.sprintf "%s=%Li" lbl i :: cur) args | `Pair (lbl, `Float f) :: args -> args_of_args (Printf.sprintf "%s=%f" lbl f :: cur) args | `Pair (lbl, `Rational { Avutil.num; den }) :: args -> args_of_args (Printf.sprintf "%s=%i/%i" lbl num den :: cur) args let args_of_args = function | Some args -> Some (String.concat ":" (args_of_args [] args)) | None -> None let attach_pad filter_ctx graph pad = { pad with filter_ctx = Some filter_ctx; _config = Some graph.c } let append_io graph ~name filter_name filter_ctx = match filter_name with | "abuffer" -> graph.audio_inputs <- (name, filter_ctx) :: graph.audio_inputs | "buffer" -> graph.video_inputs <- (name, filter_ctx) :: graph.video_inputs | "abuffersink" -> graph.audio_outputs <- (name, filter_ctx) :: graph.audio_outputs | "buffersink" -> graph.video_outputs <- (name, filter_ctx) :: graph.video_outputs | _ -> () (* This creates a record with a hidden field in the last position. 
*) external append_context : [ `Unattached ] filter -> filter_ctx -> [ `Attached ] filter = "ocaml_avfilter_append_context" let attach ?args ~name filter graph = if List.mem name graph.names then raise Exists; let args = args_of_args args in let filter_ctx, inputs, outputs = create_filter ?args ~name filter.name graph.c in let io = { inputs = split_pads inputs; outputs = split_pads outputs } in let f () = List.map (attach_pad filter_ctx graph) in let inputs = { audio = (f ()) io.inputs.audio; video = (f ()) io.inputs.video } in let outputs = { audio = (f ()) io.outputs.audio; video = (f ()) io.outputs.video } in let io = { inputs; outputs } in let filter = { filter with io } in graph.names <- name :: graph.names; append_io graph ~name filter.name filter_ctx; append_context filter filter_ctx external link : filter_ctx -> int -> filter_ctx -> int -> unit = "ocaml_avfilter_link" let get_some = function Some x -> x | None -> assert false let link src dst = link (get_some src.filter_ctx) src.idx (get_some dst.filter_ctx) dst.idx type command_flag = [ `Fast ] (* For now.. *) let int_of_flag = function `Fast -> 1 external process_command : flags:int -> cmd:string -> arg:string -> filter_ctx -> string = "ocaml_avfilter_process_commands" external get_context : [ `Attached ] filter -> filter_ctx = "ocaml_avfilter_get_content" let process_command ?(flags = []) ~cmd ?(arg = "") filter = let flags = List.fold_left (fun cur flag -> cur lor int_of_flag flag) 0 flags in process_command ~flags ~cmd ~arg (get_context filter) type ('a, 'b, 'c) parse_node = { node_name : string; node_args : args list option; node_pad : ('a, 'b, 'c) pad; } type ('a, 'b) parse_av = ( ('a, [ `Audio ], 'b) parse_node list, ('a, [ `Video ], 'b) parse_node list ) av type 'a parse_io = (('a, [ `Input ]) parse_av, ('a, [ `Output ]) parse_av) io external parse : inputs:(string * filter_ctx * int) array -> outputs:(string * filter_ctx * int) array -> string -> _config -> unit = "ocaml_avfilter_parse" let parse ({ inputs; outputs } : [ `Unattached ] parse_io) filters graph = let get_pad (type a b) (node : ([ `Unattached ], a, b) parse_node) : ([ `Unattached ], a, b) parse_node * filter_ctx = let { node_name; node_args; node_pad } = node in if List.mem node_name graph.names then raise Exists; let { filter_name; _ } = node_pad in let args = args_of_args node_args in let filter_ctx, _, _ = create_filter ?args ~name:node_name filter_name graph.c in graph.names <- node_name :: graph.names; append_io graph ~name:node_name filter_name filter_ctx; (node, filter_ctx) in let audio_inputs = List.map get_pad inputs.audio in let video_inputs = List.map get_pad inputs.video in let audio_outputs = List.map get_pad outputs.audio in let video_outputs = List.map get_pad outputs.video in let get_ctx ({ node_name; node_pad; _ }, filter_ctx) = (node_name, filter_ctx, node_pad.idx) in let inputs = Array.of_list (List.map get_ctx audio_inputs @ List.map get_ctx video_inputs) in let outputs = Array.of_list (List.map get_ctx audio_outputs @ List.map get_ctx video_outputs) in parse ~inputs ~outputs filters graph.c; let attach_pad (type a b) ((node : ([ `Unattached ], a, b) parse_node), filter_ctx) : ([ `Attached ], a, b) parse_node = { node with node_pad = attach_pad filter_ctx graph node.node_pad } in let audio = List.map attach_pad audio_inputs in let video = List.map attach_pad video_inputs in let inputs = { audio; video } in let audio = List.map attach_pad audio_outputs in let video = List.map attach_pad video_outputs in let outputs = { audio; video } in { 
inputs; outputs } external config : _config -> unit = "ocaml_avfilter_config" (* First argument is not used but here to make sure that _config is not GCed while using the filters. *) external write_frame : _config -> filter_ctx -> 'a Avutil.frame -> unit = "ocaml_avfilter_write_frame" external write_eof_frame : _config -> filter_ctx -> unit = "ocaml_avfilter_write_eof_frame" let write_frame config filter = function | `Frame frame -> write_frame config filter frame | `Flush -> write_eof_frame config filter (* First argument is not used but here to make sure that _config is not GCed while using the filters. *) external get_frame : _config -> filter_ctx -> 'b Avutil.frame = "ocaml_avfilter_get_frame" let launch graph = config graph.c; let audio = List.map (fun (name, filter_ctx) -> (name, write_frame graph.c filter_ctx)) graph.audio_inputs in let video = List.map (fun (name, filter_ctx) -> (name, write_frame graph.c filter_ctx)) graph.video_inputs in let inputs = { audio; video } in let audio = List.map (fun (name, filter_ctx) -> ( name, { context = filter_ctx; handler = (fun () -> get_frame graph.c filter_ctx); } )) graph.audio_outputs in let video = List.map (fun (name, filter_ctx) -> ( name, { context = filter_ctx; handler = (fun () -> get_frame graph.c filter_ctx); } )) graph.video_outputs in let outputs = { audio; video } in { outputs; inputs } module Utils = struct type audio_converter = { time_base : Avutil.rational; filter_in : [ `Frame of Avutil.audio Avutil.frame | `Flush ] -> unit; filter_out : unit -> Avutil.audio Avutil.frame; } type audio_params = { sample_rate : int; channel_layout : Avutil.Channel_layout.t; sample_format : Avutil.Sample_format.t; } let init_audio_converter ?out_params ?out_frame_size ~in_time_base ~in_params () = let abuffer_args = [ `Pair ("sample_rate", `Int in_params.sample_rate); `Pair ("time_base", `Rational in_time_base); `Pair ( "channel_layout", `Int64 (Avutil.Channel_layout.get_id in_params.channel_layout) ); `Pair ( "sample_fmt", `Int (Avutil.Sample_format.get_id in_params.sample_format) ); ] in let graph = init () in let abuffer = attach ~args:abuffer_args ~name:"abuffer" abuffer graph in let output = match abuffer.io.outputs.audio with o :: _ -> o | _ -> assert false in let output = match out_params with | None -> output | Some out_params -> let aresample = find "aresample" in let args = [ `Pair ("in_sample_rate", `Int in_params.sample_rate); `Pair ( "in_channel_layout", `Int64 (Avutil.Channel_layout.get_id in_params.channel_layout) ); `Pair ( "in_sample_fmt", `Int (Avutil.Sample_format.get_id in_params.sample_format) ); `Pair ("out_sample_rate", `Int out_params.sample_rate); `Pair ( "out_channel_layout", `Int64 (Avutil.Channel_layout.get_id out_params.channel_layout) ); `Pair ( "out_sample_fmt", `Int (Avutil.Sample_format.get_id out_params.sample_format) ); ] in let aresample = attach ~args ~name:"aresample" aresample graph in let ainput, aoutput = match (aresample.io.inputs.audio, aresample.io.outputs.audio) with | i :: _, o :: _ -> (i, o) | _ -> assert false in link output ainput; aoutput in let abuffersink = attach ~name:"sink" abuffersink graph in let () = match abuffersink.io.inputs.audio with | input :: _ -> link output input | _ -> assert false in let filter = launch graph in let filter_in, filter_out = match (filter.inputs.audio, filter.outputs.audio) with | (_, i) :: _, (_, o) :: _ -> (i, o) | _ -> assert false in let () = match out_frame_size with | None -> () | Some frame_size -> set_frame_size filter_out.context frame_size in let 
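(* Illustrative use of the converter built by this function (a sketch;
   [in_params], [frame] and [consume] are placeholders supplied by the caller):

     let conv =
       init_audio_converter ~in_time_base:{ num = 1; den = 44100 } ~in_params ()
     in
     convert_audio conv consume (`Frame frame);
     convert_audio conv consume `Flush *)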
time_base = time_base filter_out.context in { time_base; filter_in; filter_out = filter_out.handler } let convert_audio { filter_in; filter_out; _ } cb frame = let rec flush () = try cb (filter_out ()); flush () with Avutil.Error `Eagain -> () in filter_in frame; try flush () with Avutil.Error `Eof when frame = `Flush -> () let time_base { time_base; _ } = time_base end ocaml-ffmpeg-1.1.11/avfilter/avfilter.mli000066400000000000000000000102261457634536500203060ustar00rootroot00000000000000(** This module provides an API to AVfilter. *) open Avutil type config type valued_arg = [ `String of string | `Int of int | `Int64 of int64 | `Float of float | `Rational of rational ] type args = [ `Flag of string | `Pair of string * valued_arg ] type ('a, 'b) av = { audio : 'a; video : 'b } type ('a, 'b) io = { inputs : 'a; outputs : 'b } (** (attached/unattached, audio/video, input/output) pad *) type ('a, 'b, 'c) pad type ('a, 'b) pads = (('a, [ `Audio ], 'b) pad list, ('a, [ `Video ], 'b) pad list) av type flag = [ `Dynamic_inputs | `Dynamic_outputs | `Slice_threads | `Support_timeline_generic | `Support_timeline_internal ] type 'a filter = { name : string; description : string; options : Avutil.Options.t; flags : flag list; io : (('a, [ `Input ]) pads, ('a, [ `Output ]) pads) io; } type 'a input = [ `Frame of 'a frame | `Flush ] -> unit type 'a context type 'a output = { context : 'a context; handler : unit -> 'a frame } type 'a entries = (string * 'a) list type inputs = ([ `Audio ] input entries, [ `Video ] input entries) av type outputs = ([ `Audio ] output entries, [ `Video ] output entries) av type t = (inputs, outputs) io (* Output context. *) val time_base : _ context -> Avutil.rational val frame_rate : [ `Video ] context -> Avutil.rational val width : [ `Video ] context -> int val height : [ `Video ] context -> int val pixel_aspect : [ `Video ] context -> Avutil.rational option val pixel_format : [ `Video ] context -> Avutil.Pixel_format.t val channels : [ `Audio ] context -> int val channel_layout : [ `Audio ] context -> Avutil.Channel_layout.t val sample_rate : [ `Audio ] context -> int val sample_format : [ `Audio ] context -> Avutil.Sample_format.t val set_frame_size : [ `Audio ] context -> int -> unit exception Exists (** Filter list. *) val filters : [ `Unattached ] filter list val find : string -> [ `Unattached ] filter val find_opt : string -> [ `Unattached ] filter option (** Buffers (input). *) val abuffer : [ `Unattached ] filter val buffer : [ `Unattached ] filter (** Sinks (output). *) val abuffersink : [ `Unattached ] filter val buffersink : [ `Unattached ] filter (** Pad name. *) val pad_name : _ pad -> string (** Initiate a filter graph configuration. *) val init : unit -> config (** Attach a filter to a filter graph configuration. Raises [Exists] if there is already a filter by that name in the graph. Number of inputs or outputs can change from the filter's specifications, in particular if the filter has the [`Dynamic_input] or [`Dynamic_output] flag set. *) val attach : ?args:args list -> name:string -> [ `Unattached ] filter -> config -> [ `Attached ] filter (** Link two filter pads. *) val link : ([ `Attached ], 'a, [ `Output ]) pad -> ([ `Attached ], 'a, [ `Input ]) pad -> unit type command_flag = [ `Fast ] (** Send a command to a attached filter pad. *) val process_command : ?flags:command_flag list -> cmd:string -> ?arg:string -> [ `Attached ] filter -> string (** Parse a graph described by a string and attach outputs/inputs to it. 
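The description uses the libavfilter textual filtergraph syntax, for example
    ["anull"], or ["[in] volume=0.5 [out]"] with explicit link labels. Each
    [parse_node] designates an unattached pad to instantiate and splice onto one
    of the open links of the description, its [node_name] being matched against
    the corresponding label (typically an [abuffer] output for [in] and an
    [abuffersink] input for [out] on an audio graph). An illustrative sketch,
    not a prescriptive recipe, assuming [io] was built along those lines:

    {[ let attached = parse io "[in] volume=0.5 [out]" graph in launch graph ]}

    where [graph] is the configuration obtained from [init] and [attached]
    carries the now-attached nodes.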
*) type ('a, 'b, 'c) parse_node = { node_name : string; node_args : args list option; node_pad : ('a, 'b, 'c) pad; } type ('a, 'b) parse_av = ( ('a, [ `Audio ], 'b) parse_node list, ('a, [ `Video ], 'b) parse_node list ) av type 'a parse_io = (('a, [ `Input ]) parse_av, ('a, [ `Output ]) parse_av) io val parse : [ `Unattached ] parse_io -> string -> config -> [ `Attached ] parse_io (** Check validity and configure all the links and formats in the graph and return its outputs and outputs. *) val launch : config -> t module Utils : sig type audio_converter type audio_params = { sample_rate : int; channel_layout : Avutil.Channel_layout.t; sample_format : Avutil.Sample_format.t; } val init_audio_converter : ?out_params:audio_params -> ?out_frame_size:int -> in_time_base:Avutil.rational -> in_params:audio_params -> unit -> audio_converter val time_base : audio_converter -> Avutil.rational val convert_audio : audio_converter -> (Avutil.audio Avutil.frame -> unit) -> [ `Frame of Avutil.audio Avutil.frame | `Flush ] -> unit end ocaml-ffmpeg-1.1.11/avfilter/avfilter_stubs.c000066400000000000000000000353361457634536500212000ustar00rootroot00000000000000#include #define CAML_NAME_SPACE 1 #include #include #include #include #include #include #include "avutil_stubs.h" #include "polymorphic_variant_values_stubs.h" #include #include #include #define AvFilterContext_val(v) (*(AVFilterContext **)Data_abstract_val(v)) static inline value value_of_avfiltercontext(value ret, AVFilterContext *avfiltercontext) { ret = caml_alloc(1, Abstract_tag); AvFilterContext_val(ret) = avfiltercontext; return ret; } CAMLprim value ocaml_avfilter_register_all(value unit) { CAMLparam0(); #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(7, 14, 100) avfilter_register_all(); #endif CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_alloc_pads(const AVFilterPad *pads, int pad_count, const char *name) { CAMLparam0(); CAMLlocal2(pad, _pads); int i, pad_type; _pads = caml_alloc_tuple(pad_count); for (i = 0; i < pad_count; i++) { pad = caml_alloc_tuple(6); Store_field(pad, 0, caml_copy_string(avfilter_pad_get_name(pads, i))); Store_field(pad, 1, caml_copy_string(name)); switch (avfilter_pad_get_type(pads, i)) { case AVMEDIA_TYPE_VIDEO: pad_type = PVV_Video; break; case AVMEDIA_TYPE_AUDIO: pad_type = PVV_Audio; break; case AVMEDIA_TYPE_DATA: pad_type = PVV_Data; break; case AVMEDIA_TYPE_SUBTITLE: pad_type = PVV_Subtitle; break; case AVMEDIA_TYPE_ATTACHMENT: pad_type = PVV_Attachment; break; default: pad_type = PVV_Unknown; } Store_field(pad, 2, pad_type); Store_field(pad, 3, Val_int(i)); Store_field(pad, 4, Val_none); Store_field(pad, 5, Val_none); Store_field(_pads, i, pad); } CAMLreturn(_pads); } CAMLprim value ocaml_avfilter_get_all_filters(value unit) { CAMLparam0(); CAMLlocal5(pad, pads, cur, ret, tmp); int c = 0; const AVFilter *f = NULL; #if LIBAVFILTER_VERSION_INT >= AV_VERSION_INT(7, 14, 100) void *opaque = 0; #endif #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(7, 14, 100) while ((f = avfilter_next(f))) c++; #else while ((f = av_filter_iterate(&opaque))) c++; #endif ret = caml_alloc_tuple(c); c = 0; f = NULL; #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(7, 14, 100) while ((f = avfilter_next(f))) { #else opaque = 0; while ((f = av_filter_iterate(&opaque))) { #endif cur = caml_alloc_tuple(6); Store_field(cur, 0, caml_copy_string(f->name)); Store_field(cur, 1, caml_copy_string(f->description)); Store_field(cur, 2, ocaml_avfilter_alloc_pads( #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(8, 3, 100) f->inputs, 
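/* Older libavfilter releases expose a filter's pad count only through
   avfilter_pad_count(); newer ones provide nb_inputs/nb_outputs on AVFilter
   directly, hence the version guard. */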
avfilter_pad_count(f->inputs), f->name #else f->inputs, f->nb_inputs, f->name #endif )); Store_field(cur, 3, ocaml_avfilter_alloc_pads( #if LIBAVFILTER_VERSION_INT < AV_VERSION_INT(8, 3, 100) f->outputs, avfilter_pad_count(f->outputs), f->name #else f->outputs, f->nb_outputs, f->name #endif )); Store_field(cur, 4, value_of_avclass(tmp, f->priv_class)); Store_field(cur, 5, Val_int(f->flags)); Store_field(ret, c, cur); c++; } CAMLreturn(ret); } #define Filter_graph_val(v) (*(AVFilterGraph **)Data_custom_val(v)) static void finalize_filter_graph(value v) { AVFilterGraph *graph = Filter_graph_val(v); avfilter_graph_free(&graph); } static struct custom_operations filter_graph_ops = { "ocaml_avfilter_filter_graph", finalize_filter_graph, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; CAMLprim value ocaml_avfilter_init(value unit) { CAMLparam0(); CAMLlocal1(ret); AVFilterGraph *graph = avfilter_graph_alloc(); if (!graph) caml_raise_out_of_memory(); ret = caml_alloc_custom(&filter_graph_ops, sizeof(AVFilterGraph *), 1, 0); Filter_graph_val(ret) = graph; CAMLreturn(ret); } CAMLprim value ocaml_avfilter_create_filter(value _args, value _instance_name, value _name, value _graph) { CAMLparam4(_instance_name, _args, _name, _graph); CAMLlocal2(ret, tmp); char *name = NULL; char *args = NULL; AVFilterGraph *graph = Filter_graph_val(_graph); const AVFilter *filter = avfilter_get_by_name(String_val(_name)); AVFilterContext *context; int err; if (!filter) caml_raise_not_found(); name = strndup(String_val(_instance_name), caml_string_length(_instance_name)); if (!name) caml_raise_out_of_memory(); if (_args != Val_none) { args = strndup(String_val(Some_val(_args)), caml_string_length(Some_val(_args))); if (!args) { if (name) free(name); caml_raise_out_of_memory(); } } caml_release_runtime_system(); err = avfilter_graph_create_filter(&context, filter, name, args, NULL, graph); caml_acquire_runtime_system(); if (name) free(name); if (args) free(args); if (err < 0) ocaml_avutil_raise_error(err); ret = caml_alloc_tuple(3); Store_field(ret, 0, value_of_avfiltercontext(tmp, context)); Store_field(ret, 1, ocaml_avfilter_alloc_pads(context->input_pads, context->nb_inputs, filter->name)); Store_field(ret, 2, ocaml_avfilter_alloc_pads(context->output_pads, context->nb_outputs, filter->name)); CAMLreturn(ret); } static void append_avfilter_in_out(AVFilterInOut **filter, char *name, AVFilterContext *filter_ctx, int pad_idx) { AVFilterInOut *cur; if (*filter) { cur = *filter; while (cur->next) cur = cur->next; cur->next = avfilter_inout_alloc(); cur = cur->next; } else { *filter = avfilter_inout_alloc(); cur = *filter; } if (!cur) { avfilter_inout_free(filter); caml_raise_out_of_memory(); } cur->name = name; cur->filter_ctx = filter_ctx; cur->pad_idx = pad_idx; cur->next = NULL; }; CAMLprim value ocaml_avfilter_process_commands(value _flags, value _cmd, value _arg, value _filter) { CAMLparam3(_cmd, _arg, _filter); char buf[4096] = {0}; char *cmd; char *arg; int err; AVFilterContext *filter_ctx = AvFilterContext_val(_filter); cmd = av_malloc(caml_string_length(_cmd) + 1); if (!cmd) caml_raise_out_of_memory(); arg = av_malloc(caml_string_length(_arg) + 1); if (!arg) { av_free(cmd); caml_raise_out_of_memory(); } memcpy(cmd, String_val(_cmd), caml_string_length(_cmd) + 1); memcpy(arg, String_val(_arg), caml_string_length(_arg) + 1); caml_release_runtime_system(); err = avfilter_process_command(filter_ctx, cmd, arg, buf, sizeof(buf), Int_val(_flags));
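/* The OCaml runtime lock was released around the potentially blocking
   avfilter_process_command() call above and is re-acquired below before any
   OCaml value is touched again. */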
caml_acquire_runtime_system(); av_free(cmd); av_free(arg); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(caml_copy_string(buf)); } CAMLprim value ocaml_avfilter_parse(value _inputs, value _outputs, value _filters, value _graph) { CAMLparam4(_inputs, _outputs, _filters, _graph); CAMLlocal1(_pad); int c, err, idx; AVFilterInOut *inputs = NULL; AVFilterInOut *outputs = NULL; AVFilterGraph *graph = Filter_graph_val(_graph); AVFilterContext *filter_ctx; char *filters, *name; for (c = 0; c < Wosize_val(_inputs); c++) { _pad = Field(_inputs, c); name = av_strdup(String_val(Field(_pad, 0))); filter_ctx = AvFilterContext_val(Field(_pad, 1)); idx = Int_val(Field(_pad, 2)); append_avfilter_in_out(&inputs, name, filter_ctx, idx); } for (c = 0; c < Wosize_val(_outputs); c++) { _pad = Field(_outputs, c); name = av_strdup(String_val(Field(_pad, 0))); filter_ctx = AvFilterContext_val(Field(_pad, 1)); idx = Int_val(Field(_pad, 2)); append_avfilter_in_out(&outputs, name, filter_ctx, idx); } filters = strndup(String_val(_filters), caml_string_length(_filters)); if (!filters) { if (inputs) avfilter_inout_free(&inputs); if (outputs) avfilter_inout_free(&outputs); caml_raise_out_of_memory(); } caml_release_runtime_system(); err = avfilter_graph_parse_ptr(graph, filters, &inputs, &outputs, NULL); caml_acquire_runtime_system(); free(filters); if (inputs) avfilter_inout_free(&inputs); if (outputs) avfilter_inout_free(&outputs); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_append_context(value _filter, value _ctx) { CAMLparam1(_filter); CAMLlocal1(ret); int n = Wosize_val(_filter); int i; ret = caml_alloc_tuple(n + 1); for (i = 0; i < n; i++) { Store_field(ret, i, Field(_filter, i)); } Store_field(ret, n, _ctx); CAMLreturn(ret); } CAMLprim value ocaml_avfilter_get_content(value _filter) { CAMLparam1(_filter); int n = Wosize_val(_filter); CAMLreturn(Field(_filter, n - 1)); } CAMLprim value ocaml_avfilter_link(value _src, value _srcpad, value _dst, value _dstpad) { CAMLparam0(); AVFilterContext *src = AvFilterContext_val(_src); AVFilterContext *dst = AvFilterContext_val(_dst); caml_release_runtime_system(); int err = avfilter_link(src, Int_val(_srcpad), dst, Int_val(_dstpad)); caml_acquire_runtime_system(); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_buffersink_get_time_base(value _src) { CAMLparam0(); CAMLlocal1(ret); AVFilterContext *filter_ctx = AvFilterContext_val(_src); AVRational time_base = av_buffersink_get_time_base(filter_ctx); value_of_rational(&time_base, &ret); CAMLreturn(ret); } CAMLprim value ocaml_avfilter_buffersink_get_frame_rate(value _src) { CAMLparam0(); CAMLlocal1(ret); AVFilterContext *filter_ctx = AvFilterContext_val(_src); AVRational frame_rate = av_buffersink_get_frame_rate(filter_ctx); value_of_rational(&frame_rate, &ret); CAMLreturn(ret); } CAMLprim value ocaml_avfilter_buffersink_get_sample_format(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int sample_format = av_buffersink_get_format(filter_ctx); CAMLreturn(Val_SampleFormat((enum AVSampleFormat)sample_format)); } CAMLprim value ocaml_avfilter_buffersink_get_w(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int w = av_buffersink_get_w(filter_ctx); CAMLreturn(Val_int(w)); } CAMLprim value ocaml_avfilter_buffersink_get_h(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int h = av_buffersink_get_h(filter_ctx); 
CAMLreturn(Val_int(h)); } CAMLprim value ocaml_avfilter_buffersink_get_pixel_format(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int pixel_format = av_buffersink_get_format(filter_ctx); CAMLreturn(Val_PixelFormat((enum AVPixelFormat)pixel_format)); } CAMLprim value ocaml_avfilter_buffersink_get_pixel_aspect(value _src) { CAMLparam0(); CAMLlocal2(ans, ret); AVFilterContext *filter_ctx = AvFilterContext_val(_src); AVRational pixel_aspect = av_buffersink_get_sample_aspect_ratio(filter_ctx); if (pixel_aspect.num == 0) CAMLreturn(Val_none); value_of_rational(&pixel_aspect, &ans); ret = caml_alloc_tuple(1); Store_field(ret, 0, ans); CAMLreturn(ret); } CAMLprim value ocaml_avfilter_buffersink_get_channels(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int channels = av_buffersink_get_channels(filter_ctx); CAMLreturn(Val_int(channels)); } CAMLprim value ocaml_avfilter_buffersink_get_channel_layout(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); uint64_t layout = av_buffersink_get_channel_layout(filter_ctx); CAMLreturn(Val_ChannelLayout(layout)); } CAMLprim value ocaml_avfilter_buffersink_get_sample_rate(value _src) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); int sample_rate = av_buffersink_get_sample_rate(filter_ctx); CAMLreturn(Val_int(sample_rate)); } CAMLprim value ocaml_avfilter_buffersink_set_frame_size(value _src, value _size) { CAMLparam0(); AVFilterContext *filter_ctx = AvFilterContext_val(_src); av_buffersink_set_frame_size(filter_ctx, Int_val(_size)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_config(value _graph) { CAMLparam1(_graph); caml_release_runtime_system(); int err = avfilter_graph_config(Filter_graph_val(_graph), NULL); caml_acquire_runtime_system(); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_write_frame(value _config, value _filter, value _frame) { CAMLparam2(_config, _frame); AVFilterContext *filter_ctx = AvFilterContext_val(_filter); caml_release_runtime_system(); int err = av_buffersrc_write_frame(filter_ctx, Frame_val(_frame)); caml_acquire_runtime_system(); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_write_eof_frame(value _config, value _filter) { CAMLparam1(_config); AVFilterContext *filter_ctx = AvFilterContext_val(_filter); caml_release_runtime_system(); int err = av_buffersrc_write_frame(filter_ctx, NULL); caml_acquire_runtime_system(); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_unit); } CAMLprim value ocaml_avfilter_get_frame(value _config, value _filter) { CAMLparam1(_config); CAMLlocal1(frame_value); AVFilterContext *filter_ctx = AvFilterContext_val(_filter); AVFrame *frame = av_frame_alloc(); if (!frame) { caml_raise_out_of_memory(); } caml_release_runtime_system(); int err = av_buffersink_get_frame(filter_ctx, frame); caml_acquire_runtime_system(); if (err < 0) { av_frame_free(&frame); ocaml_avutil_raise_error(err); } frame_value = value_of_frame(frame); CAMLreturn(frame_value); } CAMLprim value ocaml_avfilter_int_of_flag(value _flag) { CAMLparam1(_flag); switch (_flag) { case PVV_Dynamic_inputs: CAMLreturn(Val_int(AVFILTER_FLAG_DYNAMIC_INPUTS)); case PVV_Dynamic_outputs: CAMLreturn(Val_int(AVFILTER_FLAG_DYNAMIC_OUTPUTS)); case PVV_Slice_threads: CAMLreturn(Val_int(AVFILTER_FLAG_SLICE_THREADS)); case PVV_Support_timeline_generic: 
CAMLreturn(Val_int(AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC)); case PVV_Support_timeline_internal: CAMLreturn(Val_int(AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL)); default: caml_failwith("Invalid flag type!"); } } ocaml-ffmpeg-1.1.11/avfilter/config/000077500000000000000000000000001457634536500172335ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avfilter/config/discover.ml000066400000000000000000000012271457634536500214050ustar00rootroot00000000000000module C = Configurator.V1 let () = C.main ~name:"ffmpeg-avfilter-pkg-config" (fun c -> let default : C.Pkg_config.package_conf = { libs = ["-lavfilter"]; cflags = [] } in let conf = match C.Pkg_config.get c with | None -> default | Some pc -> ( match C.Pkg_config.query_expr_err pc ~package:"libavfilter" ~expr:"libavfilter >= 6.107.100" with | Error msg -> failwith msg | Ok deps -> deps) in C.Flags.write_sexp "c_flags.sexp" conf.cflags; C.Flags.write_sexp "c_library_flags.sexp" conf.libs) ocaml-ffmpeg-1.1.11/avfilter/config/dune000066400000000000000000000000751457634536500201130ustar00rootroot00000000000000(executable (name discover) (libraries dune.configurator)) ocaml-ffmpeg-1.1.11/avfilter/dune000066400000000000000000000005711457634536500166470ustar00rootroot00000000000000(library (name avfilter) (public_name ffmpeg-avfilter) (synopsis "Bindings to ffmpeg's avfilter library") (foreign_stubs (language c) (names avfilter_stubs) (flags (:include c_flags.sexp))) (c_library_flags (:include c_library_flags.sexp)) (libraries ffmpeg-avutil)) (rule (targets c_flags.sexp c_library_flags.sexp) (action (run ./config/discover.exe))) ocaml-ffmpeg-1.1.11/avutil/000077500000000000000000000000001457634536500154565ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avutil/avutil.ml000066400000000000000000000546561457634536500173340ustar00rootroot00000000000000(* Line *) type input type output (* Container *) type 'a container (** {1 Media types} *) type audio = [ `Audio ] type video = [ `Video ] type subtitle = [ `Subtitle ] type media_type = Media_types.t (* Format *) type ('line, 'media) format external qp2lambda : unit -> int = "ocaml_avutil_qp2lambda" let qp2lambda = qp2lambda () (* Frame *) module Frame = struct type 'media t external pts : _ t -> Int64.t option = "ocaml_avutil_frame_pts" external set_pts : _ t -> Int64.t option -> unit = "ocaml_avutil_frame_set_pts" external duration : _ t -> Int64.t option = "ocaml_avutil_frame_duration" external set_duration : _ t -> Int64.t option -> unit = "ocaml_avutil_frame_set_duration" external pkt_dts : _ t -> Int64.t option = "ocaml_avutil_frame_pkt_dts" external set_pkt_dts : _ t -> Int64.t option -> unit = "ocaml_avutil_frame_set_pkt_dts" external metadata : _ t -> (string * string) array = "ocaml_avutil_frame_metadata" let metadata frame = Array.to_list (metadata frame) external set_metadata : _ t -> (string * string) array -> unit = "ocaml_avutil_frame_set_metadata" let set_metadata frame metadata = set_metadata frame (Array.of_list metadata) external best_effort_timestamp : _ t -> Int64.t option = "ocaml_avutil_frame_best_effort_timestamp" external pkt_duration : _ t -> Int64.t option = "ocaml_avutil_frame_pkt_duration" external copy : 'a t -> 'b t -> unit = "ocaml_avutil_frame_copy" end type 'media frame = 'media Frame.t type error = [ `Bsf_not_found | `Decoder_not_found | `Demuxer_not_found | `Encoder_not_found | `Eof | `Exit | `Filter_not_found | `Invalid_data | `Muxer_not_found | `Option_not_found | `Patch_welcome | `Protocol_not_found | `Stream_not_found | `Bug | `Eagain | `Unknown | `Experimental | 
`Other of int | `Failure of string ] external string_of_error : error -> string = "ocaml_avutil_string_of_error" exception Error of error let () = Printexc.register_printer (function | Error err -> Some (Printf.sprintf "Avutil.Error(%s)" (string_of_error err)) | _ -> None) let () = Callback.register_exception "ffmpeg_exn_error" (Error `Unknown); Callback.register "ffmpeg_exn_failure" (fun s -> raise (Error (`Failure s))) type data = (int, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t let create_data len = Bigarray.Array1.create Bigarray.int8_unsigned Bigarray.c_layout len type rational = { num : int; den : int } external av_d2q : float -> rational = "ocaml_avutil_av_d2q" let string_of_rational { num; den } = Printf.sprintf "%d/%d" num den external time_base : unit -> rational = "ocaml_avutil_time_base" module Time_format = struct type t = [ `Second | `Millisecond | `Microsecond | `Nanosecond ] end module Log = struct type level = [ `Quiet | `Panic | `Fatal | `Error | `Warning | `Info | `Verbose | `Debug | `Trace ] let int_of_level = function | `Quiet -> -8 | `Panic -> 0 | `Fatal -> 8 | `Error -> 16 | `Warning -> 24 | `Info -> 32 | `Verbose -> 40 | `Debug -> 48 | `Trace -> 56 external set_level : int -> unit = "ocaml_avutil_set_log_level" let set_level level = set_level (int_of_level level) external setup_log_callback : unit -> unit = "ocaml_avutil_setup_log_callback" (* external process_log : (string -> unit) -> unit = "ocaml_ffmpeg_process_log" *) let log_fn = ref (Printf.printf "%s") let log_fn_m = Mutex.create () let set_callback fn = setup_log_callback (); Mutex.lock log_fn_m; log_fn := fn; Mutex.unlock log_fn_m external clear_callback : unit -> unit = "ocaml_avutil_clear_log_callback" let clear_callback () = clear_callback (); set_callback (Printf.printf "%s") (* let () = ignore (Thread.create (fun () -> process_log (fun msg -> !log_fn msg)) ()) *) end module Pixel_format = struct type t = Pixel_format.t type flag = Pixel_format_flag.t type component_descriptor = { plane : int; step : int; offset : int; shift : int; depth : int; } (* An extra hidden field is stored on the C side with a reference to the underlying C descriptor for use with the C functions consuming it. 
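As an illustration (a sketch, assuming [`Yuv420p] is among the generated
   [Pixel_format.t] constructors): [descriptor `Yuv420p] returns the record
   defined below, [bits] then gives the bits per pixel of that format and
   [planes `Yuv420p] its plane count.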
*) type descriptor = { name : string; nb_components : int; log2_chroma_w : int; log2_chroma_h : int; flags : flag list; comp : component_descriptor list; alias : string option; } external descriptor : t -> descriptor = "ocaml_avutil_pixelformat_descriptor" external bits : descriptor -> int = "ocaml_avutil_pixelformat_bits_per_pixel" external planes : t -> int = "ocaml_avutil_pixelformat_planes" external to_string : t -> string option = "ocaml_avutil_pixelformat_to_string" external of_string : string -> t = "ocaml_avutil_pixelformat_of_string" external get_id : t -> int = "ocaml_avutil_get_pixel_fmt_id" external find_id : int -> t = "ocaml_avutil_find_pixel_fmt_from_id" end module Channel_layout = struct type t = Channel_layout.t external get_description : t -> int -> string = "ocaml_avutil_get_channel_layout_description" let get_description ?(channels = -1) ch = get_description ch channels external find : string -> t = "ocaml_avutil_get_channel_layout" external get_nb_channels : t -> int = "ocaml_avutil_get_channel_layout_nb_channels" external get_default : int -> t = "ocaml_avutil_get_default_channel_layout" external get_id : t -> int64 = "ocaml_avutil_get_channel_layout_id" external from_id : int64 -> t = "ocaml_avutil_channel_layout_of_id" end module Sample_format = struct type t = Sample_format.t external get_name : t -> string option = "ocaml_avutil_get_sample_fmt_name" external get_id : t -> int = "ocaml_avutil_get_sample_fmt_id" external find : string -> t = "ocaml_avutil_find_sample_fmt" external find_id : int -> t = "ocaml_avutil_find_sample_fmt_from_id" end module Audio = struct external create_frame : Sample_format.t -> Channel_layout.t -> int -> int -> audio frame = "ocaml_avutil_audio_create_frame" external frame_get_sample_format : audio frame -> Sample_format.t = "ocaml_avutil_audio_frame_get_sample_format" external frame_get_sample_rate : audio frame -> int = "ocaml_avutil_audio_frame_get_sample_rate" external frame_get_channels : audio frame -> int = "ocaml_avutil_audio_frame_get_channels" external frame_get_channel_layout : audio frame -> Channel_layout.t = "ocaml_avutil_audio_frame_get_channel_layout" external frame_nb_samples : audio frame -> int = "ocaml_avutil_audio_frame_nb_samples" external frame_copy_samples : audio frame -> int -> audio frame -> int -> int -> unit = "ocaml_avutil_audio_frame_copy_samples" end module Video = struct type planes = (data * int) array external create_frame : int -> int -> Pixel_format.t -> video frame = "ocaml_avutil_video_create_frame" external frame_get_linesize : video frame -> int -> int = "ocaml_avutil_video_frame_get_linesize" external get_frame_planes : video frame -> bool -> planes = "ocaml_avutil_video_get_frame_bigarray_planes" let frame_visit ~make_writable visit frame = visit (get_frame_planes frame make_writable); frame external frame_get_width : video frame -> int = "ocaml_avutil_video_frame_width" external frame_get_height : video frame -> int = "ocaml_avutil_video_frame_height" external frame_get_pixel_format : video frame -> Pixel_format.t = "ocaml_avutil_video_frame_get_pixel_format" external frame_get_pixel_aspect : video frame -> rational option = "ocaml_avutil_video_frame_get_pixel_aspect" end module Subtitle = struct let time_base () = { num = 1; den = 100 } external create_frame : int64 -> int64 -> string array -> subtitle frame = "ocaml_avutil_subtitle_create_frame" let create_frame start_time end_time lines = let num_time_base = float_of_int (time_base ()).num in let den_time_base = float_of_int (time_base 
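(* [start_time] and [end_time] are expressed in seconds; they are converted
     here into ticks of the subtitle time base (1/100 of a second, see
     [time_base] above) before being passed to the C stub. *)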
()).den in create_frame (Int64.of_float (start_time *. den_time_base /. num_time_base)) (Int64.of_float (end_time *. den_time_base /. num_time_base)) (Array.of_list lines) external frame_to_lines : subtitle frame -> int64 * int64 * string array = "ocaml_avutil_subtitle_to_lines" let frame_to_lines t = let num_time_base = float_of_int (time_base ()).num in let den_time_base = float_of_int (time_base ()).den in let s, e, lines = frame_to_lines t in Int64. ( to_float s *. num_time_base /. den_time_base, to_float e *. num_time_base /. den_time_base, Array.to_list lines ) end module Options = struct type t type 'a entry = { default : 'a option; min : 'a option; max : 'a option; values : (string * 'a) list; } type flag = [ `Encoding_param | `Decoding_param | `Audio_param | `Video_param | `Subtitle_param | `Export | `Readonly | `Bsf_param | `Runtime_param | `Filtering_param | `Deprecated | `Child_consts ] external int_of_flag : flag -> int = "ocaml_avutil_av_opt_int_of_flag" let flags_of_flags _flags = List.fold_left (fun flags flag -> if _flags land int_of_flag flag = 0 then flags else flag :: flags) [] [ `Encoding_param; `Decoding_param; `Audio_param; `Video_param; `Subtitle_param; `Export; `Readonly; `Bsf_param; `Runtime_param; `Filtering_param; `Deprecated; `Child_consts; ] type spec = [ `Flags of int64 entry | `Int of int entry | `Int64 of int64 entry | `Float of float entry | `Double of float entry | `String of string entry | `Rational of rational entry | `Binary of string entry | `Dict of string entry | `UInt64 of int64 entry | `Image_size of string entry | `Pixel_fmt of Pixel_format.t entry | `Sample_fmt of Sample_format.t entry | `Video_rate of string entry | `Duration of int64 entry | `Color of string entry | `Channel_layout of Channel_layout.t entry | `Bool of bool entry ] type opt = { name : string; help : string option; flags : flag list; spec : spec; } type 'a _entry = { _default : 'a option; _min : 'a option; _max : 'a option } type constant external default_int64 : constant -> int64 = "ocaml_avutil_avopt_default_int64" external default_double : constant -> float = "ocaml_avutil_avopt_default_double" external default_string : constant -> string = "ocaml_avutil_avopt_default_string" type _spec = [ `Constant of constant _entry | `Flags of int64 _entry | `Int of int _entry | `Int64 of int64 _entry | `Float of float _entry | `Double of float _entry | `String of string _entry | `Rational of rational _entry | `Binary of string _entry | `Dict of string _entry | `UInt64 of int64 _entry | `Image_size of string _entry | `Pixel_fmt of Pixel_format.t _entry | `Sample_fmt of Sample_format.t _entry | `Video_rate of string _entry | `Duration of int64 _entry | `Color of string _entry | `Channel_layout of Channel_layout.t _entry | `Bool of bool _entry ] type _opt_cursor type _class_cursor type _cursor = { _opt_cursor : _opt_cursor; _class_cursor : _class_cursor } type _opt = { _name : string; _help : string option; _spec : _spec; _flags : int; _unit : string option; _cursor : _cursor option; } exception Av_opt_iter_not_implemented of _cursor option let () = Callback.register_exception "av_opt_iter_not_implemented" (Av_opt_iter_not_implemented None) external av_opt_iter : _cursor option -> t -> _opt option = "ocaml_avutil_av_opt_iter" let constant_of_opt opt (name, { _default; _ }) = let append fn l = (name, fn (Option.get _default)) :: l in let spec = (* See: https://ffmpeg.org/doxygen/trunk/opt_8c_source.html#l01281 *) match opt.spec with (* Int *) | `Flags ({ values; _ } as spec) -> `Int64 { spec 
with values = append default_int64 values } | `Int ({ values; _ } as spec) -> `Int { spec with values = append (fun v -> Int64.to_int (default_int64 v)) values; } | `Int64 ({ values; _ } as spec) -> `Int64 { spec with values = append default_int64 values } | `UInt64 ({ values; _ } as spec) -> `UInt64 { spec with values = append default_int64 values } | `Duration ({ values; _ } as spec) -> `Duration { spec with values = append default_int64 values } | `Bool ({ values; _ } as spec) -> `Bool { spec with values = append (fun v -> default_int64 v = 0L) values; } (* Float *) | `Float ({ values; _ } as spec) -> `Float { spec with values = append default_double values } | `Double ({ values; _ } as spec) -> `Double { spec with values = append default_double values } (* Rational *) (* This is surprising but this is the current implementation it looks like. Might be historical. *) | `Rational ({ values; _ } as spec) -> `Rational { spec with values = append (fun v -> av_d2q (default_double v)) values; } (* String *) | `String ({ values; _ } as spec) -> `String { spec with values = append default_string values } | `Video_rate ({ values; _ } as spec) -> `Video_rate { spec with values = append default_string values } | `Color ({ values; _ } as spec) -> `Color { spec with values = append default_string values } | `Image_size ({ values; _ } as spec) -> `Image_size { spec with values = append default_string values } | `Dict ({ values; _ } as spec) -> `Dict { spec with values = append default_string values } (* Other *) | `Channel_layout ({ values; _ } as spec) -> `Channel_layout { spec with values = append (fun v -> Channel_layout.from_id (default_int64 v)) values; } | `Sample_fmt ({ values; _ } as spec) -> `Sample_fmt { spec with values = append (fun v -> Sample_format.find_id (Int64.to_int (default_int64 v))) values; } | `Pixel_fmt ({ values; _ } as spec) -> `Pixel_fmt { spec with values = append (fun v -> Pixel_format.find_id (Int64.to_int (default_int64 v))) values; } | _ -> failwith "Incompatible constant!" 
in { opt with spec } let opts v = let constants = Hashtbl.create 10 in let opt_of_opt { _name; _help; _spec; _flags; _unit; _ } = let spec = match _spec with | `Flags { _default; _min; _max } -> `Flags { default = _default; min = _min; max = _max; values = [] } | `Int { _default; _min; _max; _ } -> `Int { default = _default; min = _min; max = _max; values = [] } | `Int64 { _default; _min; _max; _ } -> `Int64 { default = _default; min = _min; max = _max; values = [] } | `Float { _default; _min; _max; _ } -> `Float { default = _default; min = _min; max = _max; values = [] } | `Double { _default; _min; _max; _ } -> `Double { default = _default; min = _min; max = _max; values = [] } | `String { _default; _min; _max; _ } -> `String { default = _default; min = _min; max = _max; values = [] } | `Rational { _default; _min; _max; _ } -> `Rational { default = _default; min = _min; max = _max; values = [] } | `Binary { _default; _min; _max; _ } -> `Binary { default = _default; min = _min; max = _max; values = [] } | `Dict { _default; _min; _max; _ } -> `Dict { default = _default; min = _min; max = _max; values = [] } | `UInt64 { _default; _min; _max; _ } -> `UInt64 { default = _default; min = _min; max = _max; values = [] } | `Image_size { _default; _min; _max; _ } -> `Image_size { default = _default; min = _min; max = _max; values = [] } | `Pixel_fmt { _default; _min; _max; _ } -> `Pixel_fmt { default = _default; min = _min; max = _max; values = [] } | `Sample_fmt { _default; _min; _max; _ } -> `Sample_fmt { default = _default; min = _min; max = _max; values = [] } | `Video_rate { _default; _min; _max; _ } -> `Video_rate { default = _default; min = _min; max = _max; values = [] } | `Duration { _default; _min; _max; _ } -> `Duration { default = _default; min = _min; max = _max; values = [] } | `Color { _default; _min; _max; _ } -> `Color { default = _default; min = _min; max = _max; values = [] } | `Channel_layout { _default; _min; _max; _ } -> `Channel_layout { default = _default; min = _min; max = _max; values = [] } | `Bool { _default; _min; _max; _ } -> `Bool { default = _default; min = _min; max = _max; values = [] } | `Constant _ -> assert false in let opt = { name = _name; help = _help; flags = flags_of_flags _flags; spec } in match _unit with | Some u when Hashtbl.mem constants u -> List.fold_left constant_of_opt opt (Hashtbl.find_all constants u) | _ -> opt in let rec f _cursor _opts = match av_opt_iter _cursor v with | None -> List.map opt_of_opt _opts | Some { _name; _spec = `Constant s; _cursor; _unit; _ } -> Hashtbl.add constants (Option.get _unit) (_name, s); f _cursor _opts | Some _opt -> f _opt._cursor (_opt :: _opts) | exception Av_opt_iter_not_implemented _cursor -> f _cursor _opts in f None [] (* The type implementation is a tuple [(C object, OCaml value)]. OCaml value is passed to make sure that the C object is not collected by the GC while running the function. 
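For example (an illustrative sketch; the option names are placeholders and
   the [obj] itself comes from one of the higher-level libraries, e.g. a codec
   or filter context): [get_int ~search_children:true ~name:"threads" obj]
   reads an integer option, and [get_dictionary ~name:"metadata" obj] returns a
   dictionary option as an association list.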
*) type obj type 'a getter = ?search_children:bool -> name:string -> obj -> 'a external get : 'a -> ?search_children:bool -> name:string -> 'b -> 'c = "ocaml_avutil_get_opt" let get (type a) _type ?search_children ~name (obj : obj) : a = let c, o = Obj.magic obj in let ret = get _type ?search_children ~name c in ignore o; ret let get_string = get `String let get_int = get `Int let get_int64 = get `Int64 let get_float = get `Float let get_rational = get `Rational let get_image_size = get `Image_size let get_pixel_fmt = get `Pixel_fmt let get_sample_fmt = get `Sample_fmt let get_video_rate = get `Video_rate let get_channel_layout = get `Channel_layout let get_dictionary ?search_children ~name obj = Array.to_list (get `Dict ?search_children ~name obj) end (* Options *) type value = [ `String of string | `Int of int | `Int64 of int64 | `Float of float ] type opts = (string, value) Hashtbl.t let _opt_val = function | `String s -> s | `Int i -> string_of_int i | `Int64 i -> Int64.to_string i | `Float f -> string_of_float f let opts_default = function None -> Hashtbl.create 0 | Some opts -> opts let mk_opts_array opts = Array.of_list (Hashtbl.fold (fun opt_name opt_val cur -> (opt_name, _opt_val opt_val) :: cur) opts []) let string_of_opts opts = Hashtbl.fold (fun opt_name opt_val l -> (opt_name ^ "=" ^ _opt_val opt_val) :: l) opts [] |> String.concat "," let on_opt v fn = match v with None -> () | Some v -> fn v let add_audio_opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base opts = Hashtbl.add opts "ar" (`Int sample_rate); on_opt channels (fun channels -> Hashtbl.add opts "ac" (`Int channels)); on_opt channel_layout (fun channel_layout -> Hashtbl.add opts "channel_layout" (`Int64 (Channel_layout.get_id channel_layout))); Hashtbl.add opts "sample_fmt" (`Int (Sample_format.get_id sample_format)); Hashtbl.add opts "time_base" (`String (string_of_rational time_base)) let mk_audio_opts ?opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base () = let () = match (channels, channel_layout) with | None, None -> raise (Error (`Failure "At least one of channels or channel_layout must be passed!")) | _ -> () in let opts = opts_default opts in add_audio_opts ?channels ?channel_layout ~sample_rate ~sample_format ~time_base opts; opts let add_video_opts ?frame_rate ~pixel_format ~width ~height ~time_base opts = Hashtbl.add opts "pixel_format" (`Int (Pixel_format.get_id pixel_format)); Hashtbl.add opts "video_size" (`String (Printf.sprintf "%dx%d" width height)); Hashtbl.add opts "time_base" (`String (string_of_rational time_base)); match frame_rate with | Some r -> Hashtbl.add opts "r" (`String (string_of_rational r)) | None -> () let mk_video_opts ?opts ?frame_rate ~pixel_format ~width ~height ~time_base () = let opts = opts_default opts in add_video_opts ?frame_rate ~pixel_format ~width ~height ~time_base opts; opts let filter_opts unused opts = Hashtbl.filter_map_inplace (fun k v -> if Array.mem k unused then Some v else None) opts module HwContext = struct type device_type = Hw_device_type.t type device_context type frame_context external create_device_context : device_type -> string -> (string * string) array -> device_context * string array = "ocaml_avutil_create_device_context" let create_device_context ?(device = "") ?opts device_type = let opts = opts_default opts in let ret, unused = create_device_context device_type device (mk_opts_array opts) in filter_opts unused opts; ret external create_frame_context : width:int -> height:int -> src_pixel_format:Pixel_format.t 
-> dst_pixel_format:Pixel_format.t -> device_context -> frame_context = "ocaml_avutil_create_frame_context" end ocaml-ffmpeg-1.1.11/avutil/avutil.mli000066400000000000000000000315461457634536500174760ustar00rootroot00000000000000(** Common code shared across all FFmpeg libraries. *) (** {1 Line} *) type input type output (** {1 Container} *) type 'a container (** {1 Media types} *) type audio = [ `Audio ] type video = [ `Video ] type subtitle = [ `Subtitle ] type media_type = Media_types.t (** {1 Format} *) type ('line, 'media) format (** {1 Frame} *) module Frame : sig type 'media t (** [Avutil.Frame.pts frame] returns the presentation timestamp in time_base units (time when frame should be shown to user). *) val pts : _ t -> Int64.t option (** [Avutil.Frame.set_pts frame pts] sets the presentation time for this frame. *) val set_pts : _ t -> Int64.t option -> unit (** [Avutil.Frame.duration frame] returns the frame duration in time_base, when available. *) val duration : _ t -> Int64.t option (** [Avutil.Frame.set_duration frame duration] sets the frame duration. *) val set_duration : _ t -> Int64.t option -> unit (** [Avutil.Frame.pkt_dts frame] returns DTS copied from the AVPacket that triggered returning this frame. *) val pkt_dts : _ t -> Int64.t option (** [Avutil.Frame.set_pkt_dts frame dts] sets pkt_dts value for this frame. *) val set_pkt_dts : _ t -> Int64.t option -> unit (** [Avutil.Frame.metadata frame] returns the frame's metadata. *) val metadata : _ t -> (string * string) list (** [Avutil.Frame.set_metadata frame metadata] sets the frame's metadata. *) val set_metadata : _ t -> (string * string) list -> unit (** [Avutil.frame_best_effort_timestamp frame] returns the frame timestamp estimated using various heuristics, in stream time base *) val best_effort_timestamp : _ t -> Int64.t option (** duration of the corresponding packet, expressed in AVStream->time_base units. *) val pkt_duration : _ t -> Int64.t option (** [Avutil.frame_copy src dst] copies data from [src] into [dst] *) val copy : 'a t -> 'b t -> unit end type 'media frame = 'media Frame.t (** {1 Exception} *) (** Internal errors. *) type error = [ `Bsf_not_found | `Decoder_not_found | `Demuxer_not_found | `Encoder_not_found | `Eof | `Exit | `Filter_not_found | `Invalid_data | `Muxer_not_found | `Option_not_found | `Patch_welcome | `Protocol_not_found | `Stream_not_found | `Bug | `Eagain | `Unknown | `Experimental | `Other of int | (* `Failure is for errors from the binding code itself. *) `Failure of string ] exception Error of error val string_of_error : error -> string type data = (int, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t val create_data : int -> data type rational = { num : int; den : int } val string_of_rational : rational -> string (** {5 Constants} *) val qp2lambda : int (** {5 Timestamp} *) (** Formats for time. *) module Time_format : sig (** Time formats. *) type t = [ `Second | `Millisecond | `Microsecond | `Nanosecond ] end (** Return the time base of FFmpeg. *) val time_base : unit -> rational (** {5 Logging utilities} *) module Log : sig type level = [ `Quiet | `Panic | `Fatal | `Error | `Warning | `Info | `Verbose | `Debug | `Trace ] val set_level : level -> unit val set_callback : (string -> unit) -> unit val clear_callback : unit -> unit end (** {5 Audio utilities} *) (** Formats for channels layouts. *) module Channel_layout : sig (** Channel layout formats. *) type t = Channel_layout.t (** Return a channel layout id that matches name. Raises [Not_found] otherwise. 
name can be one or several of the following notations, separated by '+' or '|': - the name of an usual channel layout (mono, stereo, 4.0, quad, 5.0, 5.0(side), 5.1, 5.1(side), 7.1, 7.1(wide), downmix); - the name of a single channel (FL, FR, FC, LFE, BL, BR, FLC, FRC, BC, SL, SR, TC, TFL, TFC, TFR, TBL, TBC, TBR, DL, DR); - a number of channels, in decimal, optionally followed by 'c', yielding the default channel layout for that number of channels; - a channel layout mask, in hexadecimal starting with "0x" (see the AV_CH_* macros). *) val find : string -> t (** Return a description of the channel layout. *) val get_description : ?channels:int -> t -> string (** Return the number of channels in the channel layout. *) val get_nb_channels : t -> int (** Return default channel layout for a given number of channels. Raises [Not_found] if not found. *) val get_default : int -> t (** Return the internal ID for a channel layout. This number should be passed as the "channel_layout" [opts] in [Av.new_audio_stream] .*) val get_id : t -> int64 end (** Formats for audio samples. *) module Sample_format : sig (** Audio sample formats. *) type t = Sample_format.t (** Return the name of the sample format if it exists. *) val get_name : t -> string option (** Find a sample format by its name. Raises [Not_found] when none exist. *) val find : string -> t (** Return the internal ID of the sample format. *) val get_id : t -> int (** Find a sample format from its ID. Raises [Not_found] when none exist. *) val find_id : int -> t end (** {5 Video utilities} *) (** Formats for pixels. *) module Pixel_format : sig (** Pixels formats. *) type t = Pixel_format.t (** Pixel format flags. *) type flag = Pixel_format_flag.t (** Pixel format component descriptor *) type component_descriptor = { plane : int; step : int; offset : int; shift : int; depth : int; } (** Pixel format descriptor. *) type descriptor = private { name : string; nb_components : int; log2_chroma_w : int; log2_chroma_h : int; flags : flag list; comp : component_descriptor list; alias : string option; } (** Return the pixel's format descriptor. Raises [Not_found] if descriptor could not be found. *) val descriptor : t -> descriptor (** Return the number of bits of the pixel format. *) val bits : descriptor -> int (** Return the number of planes of the pixel format. *) val planes : t -> int (** [Pixel_format.to_string f] Return a string representation of the pixel format [f] if it exists *) val to_string : t -> string option (** [Pixel_format.of_string s] Convert the string [s] into a [Pixel_format.t]. Raises Error if [s] is not a valid format. *) val of_string : string -> t (** Return the internal ID of the pixel format. *) val get_id : t -> int (** Find a sample pixel from its ID. Raises [Not_found] when none exist. *) val find_id : int -> t end module Audio : sig (** [Avutil.Audio.create_frame sample_format channel_layout sample_rate samples] allocates a new audio frame. *) val create_frame : Sample_format.t -> Channel_layout.t -> int -> int -> audio frame (** [Avutil.Audio.frame_get_sample_format frame] returns the sample format of the current frame. *) val frame_get_sample_format : audio frame -> Sample_format.t (** [Avutil.Audio.frame_get_sample_rate frame] returns the sample rate of the current frame. *) val frame_get_sample_rate : audio frame -> int (** [Avutil.Audio.frame_get_channels frame] returns the number of audio channels in the current frame. 
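For instance (an illustrative sketch; [frame] stands for a decoded audio
      frame obtained elsewhere):

      {[
        let channels = Avutil.Audio.frame_get_channels frame in
        let per_channel = Avutil.Audio.frame_nb_samples frame in
        (* total number of samples across all channels *)
        channels * per_channel
      ]}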
*) val frame_get_channels : audio frame -> int (** [Avutil.Audio.frame_get_channel_layout frame] returns the channel layout for the current frame. *) val frame_get_channel_layout : audio frame -> Channel_layout.t (** [Avutil.Audio.frame_nb_samples frame] returns the number of audio samples per channel in the current frame. *) val frame_nb_samples : audio frame -> int (** [Avutil.Audio.frame_copy_samples src src_offset dst dst_offset len] copies [len] samples from [src] starting at position [src_offset] into [dst] starting at position [dst_offset]. *) val frame_copy_samples : audio frame -> int -> audio frame -> int -> int -> unit end module Video : sig type planes = (data * int) array (** [Avutil.Video.create_frame w h pf] creates a video frame with [w] width, [h] height and [pf] pixel format. Raises Error if the allocation failed. *) val create_frame : int -> int -> Pixel_format.t -> video frame (** [Avutil.Video.frame_get_linesize vf n] returns the line size of the [n] plane of the [vf] video frame. Raises Error if [n] is out of bounds. *) val frame_get_linesize : video frame -> int -> int (** [Avutil.Video.frame_visit ~make_writable:wrt f vf] calls the [f] function with planes wrapping the [vf] video frame data. The make_writable:[wrt] parameter must be set to true if the [f] function writes in the planes. Access to the frame through the planes is safe as long as it occurs in the [f] function and the frame is not sent to an encoder. The same frame is returned for convenience. Raises Error if the make frame writable operation failed. *) val frame_visit : make_writable:bool -> (planes -> unit) -> video frame -> video frame (** [Avutil.Video.frame_get_width frame] returns the frame width *) val frame_get_width : video frame -> int (** [Avutil.Video.frame_get_height frame] returns the frame height *) val frame_get_height : video frame -> int (** [Avutil.Video.frame_get_pixel_format frame] returns frame's pixel format. *) val frame_get_pixel_format : video frame -> Pixel_format.t (** [Avutil.Video.frame_get_pixel_aspect frame] returns the frame's pixel aspect. *) val frame_get_pixel_aspect : video frame -> rational option end (** {5 Subtitle utilities} *) module Subtitle : sig (** Return the time base for subtitles. *) val time_base : unit -> rational (** [Avutil.Subtitle.create_frame start end lines] creates a subtitle frame from [lines] which is displayed at [start] time and hidden at [end] time in seconds. Raises Error if the allocation failed. *) val create_frame : float -> float -> string list -> subtitle frame (** Convert subtitle frame to lines. The two floats are the start and the end display times in seconds. *) val frame_to_lines : subtitle frame -> float * float * string list end (** {5 Options} *) module Options : sig type t type flag = [ `Encoding_param | `Decoding_param | `Audio_param | `Video_param | `Subtitle_param | `Export | `Readonly | `Bsf_param | `Runtime_param | `Filtering_param | `Deprecated | `Child_consts ] type 'a entry = { default : 'a option; (* Used only for numerical options. *) min : 'a option; max : 'a option; (* Pre-defined options.
*) values : (string * 'a) list; } type spec = [ `Flags of int64 entry | `Int of int entry | `Int64 of int64 entry | `Float of float entry | `Double of float entry | `String of string entry | `Rational of rational entry | `Binary of string entry | `Dict of string entry | `UInt64 of int64 entry | `Image_size of string entry | `Pixel_fmt of Pixel_format.t entry | `Sample_fmt of Sample_format.t entry | `Video_rate of string entry | `Duration of int64 entry | `Color of string entry | `Channel_layout of Channel_layout.t entry | `Bool of bool entry ] type opt = { name : string; help : string option; flags : flag list; spec : spec; } val opts : t -> opt list (* Generic type for any object with options. *) type obj type 'a getter = ?search_children:bool -> name:string -> obj -> 'a val get_string : string getter val get_int : int getter val get_int64 : int64 getter val get_float : float getter val get_rational : rational getter val get_image_size : (int * int) getter val get_pixel_fmt : Pixel_format.t getter val get_sample_fmt : Sample_format.t getter val get_video_rate : rational getter val get_channel_layout : Channel_layout.t getter val get_dictionary : (string * string) list getter end (* {1 Options } *) type value = [ `String of string | `Int of int | `Int64 of int64 | `Float of float ] type opts = (string, value) Hashtbl.t val opts_default : opts option -> opts val mk_opts_array : opts -> (string * string) array val string_of_opts : opts -> string val mk_audio_opts : ?opts:opts -> ?channels:int -> ?channel_layout:Channel_layout.t -> sample_rate:int -> sample_format:Sample_format.t -> time_base:rational -> unit -> opts val mk_video_opts : ?opts:opts -> ?frame_rate:rational -> pixel_format:Pixel_format.t -> width:int -> height:int -> time_base:rational -> unit -> opts val filter_opts : string array -> opts -> unit (** {5 HwContext} *) module HwContext : sig (** Codec hardward device type. *) type device_type = Hw_device_type.t (** Device context. *) type device_context (** Frame context. 
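A frame context is derived from a device context, e.g. (an illustrative
      sketch; the [`Cuda] device type and the pixel formats are assumptions
      whose availability depends on the local FFmpeg build):

      {[
        let device = Avutil.HwContext.create_device_context `Cuda in
        Avutil.HwContext.create_frame_context ~width:1920 ~height:1080
          ~src_pixel_format:`Yuv420p ~dst_pixel_format:`Cuda device
      ]}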
*) type frame_context val create_device_context : ?device:string -> ?opts:opts -> device_type -> device_context val create_frame_context : width:int -> height:int -> src_pixel_format:Pixel_format.t -> dst_pixel_format:Pixel_format.t -> device_context -> frame_context end ocaml-ffmpeg-1.1.11/avutil/avutil_stubs.c000066400000000000000000001263131457634536500203540ustar00rootroot00000000000000#include #include #include #define CAML_NAME_SPACE 1 #include #include #include #include #include #include #include #include #include #include #include #include #include "avutil_stubs.h" #include "channel_layout_stubs.h" #include "hw_device_type_stubs.h" #include "pixel_format_flag_stubs.h" #include "pixel_format_stubs.h" #include "sample_format_stubs.h" char ocaml_av_exn_msg[ERROR_MSG_SIZE + 1]; void ocaml_avutil_raise_error(int err) { value _err; switch (err) { case AVERROR_BSF_NOT_FOUND: _err = PVV_Bsf_not_found; break; case AVERROR_DECODER_NOT_FOUND: _err = PVV_Decoder_not_found; break; case AVERROR_DEMUXER_NOT_FOUND: _err = PVV_Demuxer_not_found; break; case AVERROR_ENCODER_NOT_FOUND: _err = PVV_Encoder_not_found; break; case AVERROR_EOF: _err = PVV_Eof; break; case AVERROR_EXIT: _err = PVV_Exit; break; case AVERROR_FILTER_NOT_FOUND: _err = PVV_Filter_not_found; break; case AVERROR_INVALIDDATA: _err = PVV_Invalid_data; break; case AVERROR_MUXER_NOT_FOUND: _err = PVV_Muxer_not_found; break; case AVERROR_OPTION_NOT_FOUND: _err = PVV_Option_not_found; break; case AVERROR_PATCHWELCOME: _err = PVV_Patch_welcome; break; case AVERROR_PROTOCOL_NOT_FOUND: _err = PVV_Protocol_not_found; break; case AVERROR_STREAM_NOT_FOUND: _err = PVV_Stream_not_found; break; case AVERROR_BUG: _err = PVV_Bug; break; case AVERROR(EAGAIN): _err = PVV_Eagain; break; case AVERROR_UNKNOWN: _err = PVV_Unknown; break; case AVERROR_EXPERIMENTAL: _err = PVV_Experimental; break; default: _err = caml_alloc_tuple(2); Store_field(_err, 0, PVV_Other); Store_field(_err, 1, Val_int(err)); } caml_raise_with_arg(*caml_named_value(EXN_ERROR), _err); } CAMLprim value ocaml_avutil_qp2lambda(value unit) { CAMLparam0(); CAMLreturn(Val_int(FF_QP2LAMBDA)); } CAMLprim value ocaml_avutil_string_of_error(value error) { CAMLparam0(); int err; switch (error) { case PVV_Bsf_not_found: err = AVERROR_BSF_NOT_FOUND; break; case PVV_Decoder_not_found: err = AVERROR_DECODER_NOT_FOUND; break; case PVV_Demuxer_not_found: err = AVERROR_DEMUXER_NOT_FOUND; break; case PVV_Encoder_not_found: err = AVERROR_ENCODER_NOT_FOUND; break; case PVV_Eof: err = AVERROR_EOF; break; case PVV_Exit: err = AVERROR_EXIT; break; case PVV_Filter_not_found: err = AVERROR_FILTER_NOT_FOUND; break; case PVV_Invalid_data: err = AVERROR_INVALIDDATA; break; case PVV_Muxer_not_found: err = AVERROR_MUXER_NOT_FOUND; break; case PVV_Option_not_found: err = AVERROR_OPTION_NOT_FOUND; break; case PVV_Patch_welcome: err = AVERROR_PATCHWELCOME; break; case PVV_Protocol_not_found: err = AVERROR_PROTOCOL_NOT_FOUND; break; case PVV_Stream_not_found: err = AVERROR_STREAM_NOT_FOUND; break; case PVV_Bug: err = AVERROR_BUG; break; case PVV_Eagain: err = AVERROR(EAGAIN); break; case PVV_Unknown: err = AVERROR_UNKNOWN; break; case PVV_Experimental: err = AVERROR_EXPERIMENTAL; break; default: if (Field(error, 0) == PVV_Other) err = Int_val(Field(error, 1)); else // Failure CAMLreturn(Field(error, 1)); } CAMLreturn(caml_copy_string(av_err2str(err))); } /***** Global initialisation *****/ static pthread_key_t ocaml_c_thread_key; static pthread_once_t ocaml_c_thread_key_once = PTHREAD_ONCE_INIT; static void 
ocaml_ffmpeg_on_thread_exit(void *key) { caml_c_thread_unregister(); } static void ocaml_ffmpeg_make_key() { pthread_key_create(&ocaml_c_thread_key, ocaml_ffmpeg_on_thread_exit); } void ocaml_ffmpeg_register_thread() { static int initialized = 1; pthread_once(&ocaml_c_thread_key_once, ocaml_ffmpeg_make_key); if (caml_c_thread_register() && !pthread_getspecific(ocaml_c_thread_key)) pthread_setspecific(ocaml_c_thread_key, (void *)&initialized); } /**** Rational ****/ void value_of_rational(const AVRational *rational, value *pvalue) { *pvalue = caml_alloc_tuple(2); Store_field(*pvalue, 0, Val_int(rational->num)); Store_field(*pvalue, 1, Val_int(rational->den)); } value ocaml_avutil_av_d2q(value f) { CAMLparam1(f); CAMLlocal1(ret); const AVRational r = av_d2q(Double_val(f), INT_MAX); value_of_rational(&r, &ret); CAMLreturn(ret); } /**** Time format ****/ int64_t second_fractions_of_time_format(value time_format) { switch (time_format) { case PVV_Second: return 1; case PVV_Millisecond: return 1000; case PVV_Microsecond: return 1000000; case PVV_Nanosecond: return 1000000000; default: break; } return 1; } /**** Logging ****/ CAMLprim value ocaml_avutil_set_log_level(value level) { CAMLparam0(); av_log_set_level(Int_val(level)); CAMLreturn(Val_unit); } #define LINE_SIZE 1024 typedef struct { char msg[LINE_SIZE]; void *next; } log_msg_t; static pthread_cond_t log_condition = PTHREAD_COND_INITIALIZER; static pthread_mutex_t log_mutex = PTHREAD_MUTEX_INITIALIZER; static log_msg_t top_level_log_msg = {"", NULL}; CAMLprim value ocaml_ffmpeg_process_log(value cb) { CAMLparam1(cb); CAMLlocal1(buffer); log_msg_t *log_msg, *next_log_msg; while (1) { caml_release_runtime_system(); pthread_mutex_lock(&log_mutex); while (top_level_log_msg.next == NULL) pthread_cond_wait(&log_condition, &log_mutex); log_msg = top_level_log_msg.next; top_level_log_msg.next = NULL; pthread_mutex_unlock(&log_mutex); caml_acquire_runtime_system(); while (log_msg != NULL) { buffer = caml_copy_string(log_msg->msg); caml_callback(cb, buffer); next_log_msg = log_msg->next; free(log_msg); log_msg = next_log_msg; } } CAMLreturn(Val_unit); } static void av_log_ocaml_callback(void *ptr, int level, const char *fmt, va_list vl) { static int print_prefix = 1; log_msg_t *log_msg; if (level > av_log_get_level()) return; pthread_mutex_lock(&log_mutex); log_msg = &top_level_log_msg; while (log_msg->next != NULL) { log_msg = log_msg->next; } // TODO: check for NULL here log_msg->next = malloc(sizeof(log_msg_t)); log_msg = (log_msg_t *)log_msg->next; log_msg->next = NULL; av_log_format_line2(ptr, level, fmt, vl, log_msg->msg, LINE_SIZE, &print_prefix); pthread_cond_signal(&log_condition); pthread_mutex_unlock(&log_mutex); } CAMLprim value ocaml_avutil_setup_log_callback(value unit) { CAMLparam0(); caml_release_runtime_system(); av_log_set_callback(&av_log_ocaml_callback); caml_acquire_runtime_system(); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_clear_log_callback() { CAMLparam0(); caml_release_runtime_system(); av_log_set_callback(&av_log_default_callback); caml_acquire_runtime_system(); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_time_base() { CAMLparam0(); CAMLlocal1(ans); value_of_rational(&AV_TIME_BASE_Q, &ans); CAMLreturn(ans); } /**** Channel layout ****/ CAMLprim value ocaml_avutil_get_channel_layout_description( value _channel_layout, value channels) { CAMLparam1(_channel_layout); char buf[1024]; uint64_t channel_layout = ChannelLayout_val(_channel_layout); av_get_channel_layout_string(buf, sizeof(buf), Int_val(channels), 
channel_layout); CAMLreturn(caml_copy_string(buf)); } CAMLprim value ocaml_avutil_get_channel_layout_nb_channels(value _channel_layout) { CAMLparam1(_channel_layout); CAMLreturn(Val_int( av_get_channel_layout_nb_channels(ChannelLayout_val(_channel_layout)))); } CAMLprim value ocaml_avutil_get_default_channel_layout(value _nb_channels) { CAMLparam0(); int64_t ret = av_get_default_channel_layout(Int_val(_nb_channels)); if (ret == 0) caml_raise_not_found(); CAMLreturn(Val_ChannelLayout(ret)); } CAMLprim value ocaml_avutil_get_channel_layout(value _name) { CAMLparam1(_name); char *name = strndup(String_val(_name), caml_string_length(_name)); if (!name) caml_raise_out_of_memory(); int64_t ret = av_get_channel_layout(name); free(name); if (ret == 0) caml_raise_not_found(); CAMLreturn(Val_ChannelLayout(ret)); } CAMLprim value ocaml_avutil_get_channel_layout_id(value _channel_layout) { CAMLparam1(_channel_layout); CAMLreturn(caml_copy_int64(ChannelLayout_val(_channel_layout))); } CAMLprim value ocaml_avutil_channel_layout_of_id(value v) { CAMLparam0(); CAMLreturn(Val_ChannelLayout(Int64_val(v))); } /**** Sample format ****/ static const enum AVSampleFormat SAMPLE_FORMATS[] = { AV_SAMPLE_FMT_NONE, AV_SAMPLE_FMT_U8, AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_U8P, AV_SAMPLE_FMT_S16P, AV_SAMPLE_FMT_S32P, AV_SAMPLE_FMT_FLTP, AV_SAMPLE_FMT_DBLP}; #define SAMPLE_FORMATS_LEN \ (sizeof(SAMPLE_FORMATS) / sizeof(enum AVSampleFormat)) static const enum caml_ba_kind BIGARRAY_KINDS[SAMPLE_FORMATS_LEN] = { CAML_BA_KIND_MASK, CAML_BA_UINT8, CAML_BA_SINT16, CAML_BA_INT32, CAML_BA_FLOAT32, CAML_BA_FLOAT64, CAML_BA_UINT8, CAML_BA_SINT16, CAML_BA_INT32, CAML_BA_FLOAT32, CAML_BA_FLOAT64}; enum caml_ba_kind bigarray_kind_of_AVSampleFormat(enum AVSampleFormat sf) { int i; for (i = 0; i < SAMPLE_FORMATS_LEN; i++) { if (sf == SAMPLE_FORMATS[i]) return BIGARRAY_KINDS[i]; } return CAML_BA_KIND_MASK; } CAMLprim value ocaml_avutil_find_sample_fmt(value _name) { CAMLparam1(_name); CAMLlocal1(ans); char *name = strndup(String_val(_name), caml_string_length(_name)); if (!name) caml_raise_out_of_memory(); enum AVSampleFormat ret = av_get_sample_fmt(name); free(name); if (ret == AV_SAMPLE_FMT_NONE) caml_raise_not_found(); CAMLreturn(Val_SampleFormat(ret)); } CAMLprim value ocaml_avutil_get_sample_fmt_name(value _sample_fmt) { CAMLparam1(_sample_fmt); CAMLlocal1(ans); enum AVSampleFormat sample_fmt = SampleFormat_val(_sample_fmt); if (sample_fmt == AV_SAMPLE_FMT_NONE) CAMLreturn(Val_none); const char *name = av_get_sample_fmt_name(SampleFormat_val(_sample_fmt)); if (!name) CAMLreturn(Val_none); ans = caml_alloc_tuple(1); Store_field(ans, 0, caml_copy_string(name)); CAMLreturn(ans); } CAMLprim value ocaml_avutil_get_sample_fmt_id(value _sample_fmt) { CAMLparam1(_sample_fmt); CAMLreturn(Val_int(SampleFormat_val(_sample_fmt))); } CAMLprim value ocaml_avutil_find_sample_fmt_from_id(value _id) { CAMLparam0(); value ret = Val_SampleFormat(Int_val(_id)); CAMLreturn(ret); } /***** AVPixelFormat *****/ CAMLprim value ocaml_avutil_pixelformat_descriptor(value pixel) { CAMLparam1(pixel); CAMLlocal4(ret, tmp1, tmp2, cons); enum AVPixelFormat p = PixelFormat_val(pixel); const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(p); AVComponentDescriptor comp_desc; int i, n; if (!pixdesc) caml_raise_not_found(); ret = caml_alloc_tuple(8); Store_field(ret, 0, caml_copy_string(pixdesc->name)); Store_field(ret, 1, Val_int(pixdesc->nb_components)); Store_field(ret, 2, Val_int(pixdesc->log2_chroma_w)); 
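/* Fields of the OCaml descriptor tuple built here: 0 = name, 1 = nb_components,
   2 = log2_chroma_w, 3 = log2_chroma_h, 4 = pixel-format flag list,
   5 = per-component (plane, step, offset, shift, depth) list, 6 = optional
   alias, 7 = abstract AVPixFmtDescriptor handle reused by
   ocaml_avutil_pixelformat_bits_per_pixel. */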
Store_field(ret, 3, Val_int(pixdesc->log2_chroma_h)); n = 0; for (i = 0; i < AV_PIX_FMT_FLAG_T_TAB_LEN; i++) { if (pixdesc->flags & AV_PIX_FMT_FLAG_T_TAB[i][1]) n++; } if (n == 0) Store_field(ret, 4, Val_int(0)); else { cons = Val_int(0); for (i = 0; i < AV_PIX_FMT_FLAG_T_TAB_LEN; i++) { if (pixdesc->flags & AV_PIX_FMT_FLAG_T_TAB[i][1]) { tmp1 = caml_alloc(2, 0); Store_field(tmp1, 0, AV_PIX_FMT_FLAG_T_TAB[i][0]); Store_field(tmp1, 1, cons); cons = tmp1; } } Store_field(ret, 4, tmp1); } cons = Val_int(0); for (i = 3; i >= 0; i--) { comp_desc = pixdesc->comp[i]; tmp2 = caml_alloc_tuple(5); Store_field(tmp2, 0, comp_desc.plane); Store_field(tmp2, 1, comp_desc.step); Store_field(tmp2, 2, comp_desc.offset); Store_field(tmp2, 3, comp_desc.shift); Store_field(tmp2, 4, comp_desc.depth); tmp1 = caml_alloc(2, 0); Store_field(tmp1, 0, tmp2); Store_field(tmp1, 1, cons); cons = tmp1; } Store_field(ret, 5, tmp1); if (pixdesc->alias) { tmp1 = caml_alloc_tuple(1); Store_field(tmp1, 0, caml_copy_string(pixdesc->alias)); Store_field(ret, 6, tmp1); } else Store_field(ret, 6, Val_none); Store_field(ret, 7, value_of_avpixfmtdescriptor(tmp1, pixdesc)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_pixelformat_bits_per_pixel(value d) { CAMLparam0(); const AVPixFmtDescriptor *pixdesc = AvPixFmtDescriptor_val(Field(d, 7)); CAMLreturn(Val_int(av_get_bits_per_pixel(pixdesc))); } CAMLprim value ocaml_avutil_pixelformat_planes(value pixel) { CAMLparam1(pixel); enum AVPixelFormat p = PixelFormat_val(pixel); CAMLreturn(Val_int(av_pix_fmt_count_planes(p))); } CAMLprim value ocaml_avutil_get_pixel_fmt_id(value _pixel_fmt) { CAMLparam1(_pixel_fmt); CAMLreturn(Val_int(PixelFormat_val(_pixel_fmt))); } CAMLprim value ocaml_avutil_find_pixel_fmt_from_id(value _id) { CAMLparam0(); value ret = Val_PixelFormat(Int_val(_id)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_pixelformat_to_string(value pixel) { CAMLparam1(pixel); CAMLlocal1(ret); enum AVPixelFormat p = PixelFormat_val(pixel); if (p == AV_PIX_FMT_NONE) CAMLreturn(Val_none); const char *name = av_get_pix_fmt_name(p); if (!name) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_string(name)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_pixelformat_of_string(value name) { CAMLparam1(name); enum AVPixelFormat p = av_get_pix_fmt(String_val(name)); if (p == AV_PIX_FMT_NONE) Fail("Invalid format name"); CAMLreturn(Val_PixelFormat(p)); } /***** AVFrame *****/ static void finalize_frame(value v) { AVFrame *frame = Frame_val(v); av_frame_free(&frame); } static struct custom_operations frame_ops = { "ocaml_avframe", finalize_frame, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; value value_of_frame(AVFrame *frame) { value ret; if (!frame) Fail("Empty frame"); int size = 0; int n = 0; while (n < AV_NUM_DATA_POINTERS && frame->buf[n] != NULL) { size += frame->buf[n]->size; n++; } ret = caml_alloc_custom_mem(&frame_ops, sizeof(AVFrame *), size); Frame_val(ret) = frame; return ret; } CAMLprim value ocaml_avutil_frame_pkt_duration(value _frame) { CAMLparam1(_frame); CAMLlocal1(ret); AVFrame *frame = Frame_val(_frame); if (frame->pkt_duration == 0) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(frame->pkt_duration)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_frame_pts(value _frame) { CAMLparam1(_frame); CAMLlocal1(ret); AVFrame *frame = Frame_val(_frame); if (frame->pts == AV_NOPTS_VALUE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, 
caml_copy_int64(frame->pts)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_frame_set_pts(value _frame, value _pts) { CAMLparam2(_frame, _pts); AVFrame *frame = Frame_val(_frame); if (_pts == Val_none) frame->pts = AV_NOPTS_VALUE; else frame->pts = Int64_val(Field(_pts, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_frame_duration(value _frame) { CAMLparam1(_frame); #if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(57, 30, 100) CAMLlocal1(ret); AVFrame *frame = Frame_val(_frame); if (frame->duration == 0) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(frame->duration)); CAMLreturn(ret); #else CAMLreturn(Val_none); #endif } CAMLprim value ocaml_avutil_frame_set_duration(value _frame, value _duration) { CAMLparam2(_frame, _duration); #if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(57, 30, 100) AVFrame *frame = Frame_val(_frame); if (_duration == Val_none) frame->duration = 0; else frame->duration = Int64_val(Field(_duration, 0)); #endif CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_frame_pkt_dts(value _frame) { CAMLparam1(_frame); CAMLlocal1(ret); AVFrame *frame = Frame_val(_frame); if (frame->pkt_dts == AV_NOPTS_VALUE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(frame->pkt_dts)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_frame_set_pkt_dts(value _frame, value _dts) { CAMLparam2(_frame, _dts); AVFrame *frame = Frame_val(_frame); if (_dts == Val_none) frame->pkt_dts = AV_NOPTS_VALUE; else frame->pkt_dts = Int64_val(Field(_dts, 0)); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_frame_metadata(value _frame) { CAMLparam1(_frame); CAMLlocal4(ans, key, val, pair); AVFrame *frame = Frame_val(_frame); AVDictionary *metadata = frame->metadata; AVDictionaryEntry *entry = NULL; int count = av_dict_count(metadata); int i; ans = caml_alloc_tuple(count); for (i = 0; i < count; i++) { pair = caml_alloc_tuple(2); entry = av_dict_get(metadata, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(pair, 0, caml_copy_string(entry->key)); Store_field(pair, 1, caml_copy_string(entry->value)); Store_field(ans, i, pair); } CAMLreturn(ans); } CAMLprim value ocaml_avutil_frame_set_metadata(value _frame, value _metadata) { CAMLparam2(_frame, _metadata); AVFrame *frame = Frame_val(_frame); AVDictionary *metadata = NULL; AVDictionaryEntry *entry = NULL; int i, ret; for (i = 0; i < Wosize_val(_metadata); i++) { ret = av_dict_set(&metadata, String_val(Field(Field(_metadata, i), 0)), String_val(Field(Field(_metadata, i), 1)), 0); if (ret < 0) ocaml_avutil_raise_error(ret); } if (frame->metadata) { av_dict_free(&frame->metadata); } frame->metadata = metadata; CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_frame_best_effort_timestamp(value _frame) { CAMLparam1(_frame); CAMLlocal1(ret); AVFrame *frame = Frame_val(_frame); if (frame->best_effort_timestamp == AV_NOPTS_VALUE) CAMLreturn(Val_none); ret = caml_alloc_tuple(1); Store_field(ret, 0, caml_copy_int64(frame->best_effort_timestamp)); CAMLreturn(ret); } CAMLprim value ocaml_avutil_frame_copy(value _src, value _dst) { CAMLparam2(_src, _dst); AVFrame *src = Frame_val(_src); AVFrame *dst = Frame_val(_dst); int ret; ret = av_frame_copy(dst, src); if (ret < 0) ocaml_avutil_raise_error(ret); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_video_create_frame(value _w, value _h, value _format) { CAMLparam1(_format); AVFrame *frame = av_frame_alloc(); if (!frame) caml_raise_out_of_memory(); frame->format = PixelFormat_val(_format); frame->width = Int_val(_w); frame->height = 
Int_val(_h); int ret = av_frame_get_buffer(frame, 32); if (ret < 0) { av_frame_free(&frame); ocaml_avutil_raise_error(ret); } CAMLreturn(value_of_frame(frame)); } /* Adapted from alloc_audio_frame */ CAMLprim value ocaml_avutil_audio_create_frame(value _sample_fmt, value _channel_layout, value _samplerate, value _samples) { CAMLparam2(_sample_fmt, _channel_layout); enum AVSampleFormat sample_fmt = SampleFormat_val(_sample_fmt); uint64_t channel_layout = ChannelLayout_val(_channel_layout); int sample_rate = Int_val(_samplerate); int nb_samples = Int_val(_samples); int ret; AVFrame *frame = av_frame_alloc(); if (!frame) caml_raise_out_of_memory(); frame->format = sample_fmt; frame->channel_layout = channel_layout; frame->sample_rate = sample_rate; frame->nb_samples = nb_samples; ret = av_frame_get_buffer(frame, 0); if (ret < 0) { av_frame_free(&frame); ocaml_avutil_raise_error(ret); } CAMLreturn(value_of_frame(frame)); } CAMLprim value ocaml_avutil_audio_frame_get_sample_format(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_SampleFormat((enum AVSampleFormat)frame->format)); } CAMLprim value ocaml_avutil_audio_frame_get_sample_rate(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_int(frame->sample_rate)); } CAMLprim value ocaml_avutil_audio_frame_get_channels(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); #if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(59, 19, 100) CAMLreturn(Val_int(frame->channels)); #else CAMLreturn(Val_int(frame->ch_layout.nb_channels)); #endif } CAMLprim value ocaml_avutil_audio_frame_get_channel_layout(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_ChannelLayout((frame->channel_layout))); } CAMLprim value ocaml_avutil_audio_frame_nb_samples(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_int(frame->nb_samples)); } /* Adapted from frame_copy_audio */ CAMLprim value ocaml_avutil_audio_frame_copy_samples(value _src, value _src_ofs, value _dst, value _dst_ofs, value _len) { CAMLparam2(_src, _dst); AVFrame *src = Frame_val(_src); AVFrame *dst = Frame_val(_dst); int src_ofs = Int_val(_src_ofs); int dst_ofs = Int_val(_dst_ofs); int len = Int_val(_len); int planar = av_sample_fmt_is_planar(dst->format); int channels = dst->channels; int planes = planar ? 
channels : 1; int i; if (src->nb_samples < src_ofs + len || dst->nb_samples < dst_ofs + len || dst->channels != src->channels || dst->channel_layout != src->channel_layout) ocaml_avutil_raise_error(AVERROR(EINVAL)); av_assert2(!src->channel_layout || src->channels == av_get_channel_layout_nb_channels(src->channel_layout)); for (i = 0; i < planes; i++) if (!dst->extended_data[i] || !src->extended_data[i]) ocaml_avutil_raise_error(AVERROR(EINVAL)); caml_release_runtime_system(); av_samples_copy(dst->extended_data, src->extended_data, dst_ofs, src_ofs, len, channels, dst->format); caml_acquire_runtime_system(); CAMLreturn(Val_unit); } CAMLprim value ocaml_avutil_video_frame_width(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_int(frame->width)); } CAMLprim value ocaml_avutil_video_frame_height(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_int(frame->height)); } CAMLprim value ocaml_avutil_video_frame_get_pixel_format(value _frame) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); CAMLreturn(Val_PixelFormat(frame->format)); } CAMLprim value ocaml_avutil_video_frame_get_pixel_aspect(value _frame) { CAMLparam1(_frame); CAMLlocal2(ret, ans); AVFrame *frame = Frame_val(_frame); const AVRational pixel_aspect = frame->sample_aspect_ratio; if (pixel_aspect.num == 0) CAMLreturn(Val_none); value_of_rational(&pixel_aspect, &ans); ret = caml_alloc_tuple(1); Store_field(ret, 0, ans); CAMLreturn(ret); } CAMLprim value ocaml_avutil_video_frame_get_linesize(value _frame, value _line) { CAMLparam1(_frame); AVFrame *frame = Frame_val(_frame); int line = Int_val(_line); if (line < 0 || line >= AV_NUM_DATA_POINTERS || !frame->data[line]) Fail( "Failed to get linesize from video frame : line (%d) out of boundaries", line); CAMLreturn(Val_int(frame->linesize[line])); } CAMLprim value ocaml_avutil_video_get_frame_bigarray_planes( value _frame, value _make_writable) { CAMLparam1(_frame); CAMLlocal2(ans, plane); AVFrame *frame = Frame_val(_frame); int i; if (Bool_val(_make_writable)) { int ret = av_frame_make_writable(frame); if (ret < 0) ocaml_avutil_raise_error(ret); } int nb_planes = av_pix_fmt_count_planes((enum AVPixelFormat)frame->format); if (nb_planes < 0) ocaml_avutil_raise_error(nb_planes); ans = caml_alloc_tuple(nb_planes); for (i = 0; i < nb_planes; i++) { intnat out_size = frame->linesize[i] * frame->height; plane = caml_alloc_tuple(2); Store_field(plane, 0, caml_ba_alloc(CAML_BA_C_LAYOUT | CAML_BA_UINT8, 1, frame->data[i], &out_size)); Store_field(plane, 1, Val_int(frame->linesize[i])); Store_field(ans, i, plane); } CAMLreturn(ans); } /***** AVSubtitle *****/ void static finalize_subtitle(value v) { struct AVSubtitle *subtitle = Subtitle_val(v); avsubtitle_free(subtitle); free(subtitle); } static struct custom_operations subtitle_ops = { "ocaml_avsubtitle", finalize_subtitle, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; value value_of_subtitle(AVSubtitle *subtitle) { value ret; if (!subtitle) Fail("Empty subtitle"); ret = caml_alloc_custom(&subtitle_ops, sizeof(AVSubtitle *), 0, 1); Subtitle_val(ret) = subtitle; return ret; } int subtitle_header_default(AVCodecContext *codec_context) { return 0; } CAMLprim value ocaml_avutil_subtitle_create_frame(value _start_time, value _end_time, value _lines) { CAMLparam3(_start_time, _end_time, _lines); CAMLlocal1(ans); int64_t start_time = Int64_val(_start_time); int64_t end_time = Int64_val(_end_time); int nb_lines = 
Wosize_val(_lines); AVSubtitle *subtitle = (AVSubtitle *)calloc(1, sizeof(AVSubtitle)); if (!subtitle) caml_raise_out_of_memory(); ans = value_of_subtitle(subtitle); // subtitle->start_display_time = (uint32_t)start_time; subtitle->end_display_time = (uint32_t)end_time; subtitle->pts = start_time; subtitle->rects = (AVSubtitleRect **)av_malloc_array(nb_lines, sizeof(AVSubtitleRect *)); if (!subtitle->rects) caml_raise_out_of_memory(); subtitle->num_rects = nb_lines; int i; for (i = 0; i < nb_lines; i++) { const char *text = String_val(Field(_lines, i)); subtitle->rects[i] = (AVSubtitleRect *)av_mallocz(sizeof(AVSubtitleRect)); if (!subtitle->rects[i]) caml_raise_out_of_memory(); subtitle->rects[i]->type = SUBTITLE_TEXT; subtitle->rects[i]->text = strdup(text); if (!subtitle->rects[i]->text) caml_raise_out_of_memory(); // subtitle->rects[i]->type = SUBTITLE_ASS; // subtitle->rects[i]->ass = get_dialog(i + 1, 0, NULL, NULL, text); // if( ! subtitle->rects[i]->ass) Fail( "Failed to allocate subtitle // frame"); } CAMLreturn(ans); } CAMLprim value ocaml_avutil_subtitle_to_lines(value _subtitle) { CAMLparam1(_subtitle); CAMLlocal2(ans, lines); struct AVSubtitle *subtitle = Subtitle_val(_subtitle); unsigned i, num_rects = subtitle->num_rects; lines = caml_alloc_tuple(num_rects); for (i = 0; i < num_rects; i++) { char *line = subtitle->rects[i]->text ? subtitle->rects[i]->text : subtitle->rects[i]->ass; Store_field(lines, i, caml_copy_string(line)); } ans = caml_alloc_tuple(3); Store_field(ans, 0, caml_copy_int64((int64_t)subtitle->start_display_time)); Store_field(ans, 1, caml_copy_int64((int64_t)subtitle->end_display_time)); Store_field(ans, 2, lines); CAMLreturn(ans); } CAMLprim value ocaml_avutil_get_opt(value _type, value search_children, value name, value obj) { CAMLparam2(name, obj); CAMLlocal2(ret, tmp); uint8_t *str; int64_t err, i, search_flags = 0; double d; AVRational r; int w_out, h_out; enum AVPixelFormat pf; enum AVSampleFormat sf; AVDictionary *dict = NULL; AVDictionaryEntry *dict_entry = NULL; int dict_length, dict_pos; if (Bool_val(search_children)) search_flags = AV_OPT_SEARCH_CHILDREN; switch (_type) { case PVV_String: err = av_opt_get(AvObj_val(obj), (const char *)String_val(name), search_flags, &str); if (err < 0) ocaml_avutil_raise_error(err); ret = caml_copy_string((char *)str); av_free(str); CAMLreturn(ret); break; case PVV_Int: err = av_opt_get_int((void *)obj, (const char *)String_val(name), search_flags, &i); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_int(i)); break; case PVV_Int64: err = av_opt_get_int((void *)obj, (const char *)String_val(name), search_flags, &i); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(caml_copy_int64(i)); break; case PVV_Float: err = av_opt_get_double((void *)obj, (const char *)String_val(name), search_flags, &d); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(caml_copy_double(d)); break; case PVV_Rational: err = av_opt_get_q((void *)obj, (const char *)String_val(name), search_flags, &r); if (err < 0) ocaml_avutil_raise_error(err); value_of_rational(&r, &ret); CAMLreturn(ret); break; case PVV_Image_size: err = av_opt_get_image_size((void *)obj, (const char *)String_val(name), search_flags, &w_out, &h_out); if (err < 0) ocaml_avutil_raise_error(err); ret = caml_alloc_tuple(2); Store_field(ret, 0, Val_int(w_out)); Store_field(ret, 1, Val_int(h_out)); CAMLreturn(ret); break; case PVV_Pixel_fmt: err = av_opt_get_pixel_fmt((void *)obj, (const char *)String_val(name), search_flags, &pf); if (err < 0) 
ocaml_avutil_raise_error(err); CAMLreturn(Val_PixelFormat(pf)); break; case PVV_Sample_fmt: err = av_opt_get_sample_fmt((void *)obj, (const char *)String_val(name), search_flags, &sf); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_SampleFormat(sf)); break; case PVV_Video_rate: err = av_opt_get_video_rate((void *)obj, (const char *)String_val(name), search_flags, &r); if (err < 0) ocaml_avutil_raise_error(err); value_of_rational(&r, &ret); CAMLreturn(ret); break; case PVV_Channel_layout: err = av_opt_get_channel_layout((void *)obj, (const char *)String_val(name), search_flags, &i); if (err < 0) ocaml_avutil_raise_error(err); CAMLreturn(Val_ChannelLayout(i)); break; case PVV_Dict: err = av_opt_get_dict_val((void *)obj, (const char *)String_val(name), search_flags, &dict); if (err < 0) ocaml_avutil_raise_error(err); dict_length = av_dict_count(dict); ret = caml_alloc_tuple(dict_length); for (dict_pos = 0; dict_pos < dict_length; dict_pos++) { dict_entry = av_dict_get(dict, "", dict_entry, AV_DICT_IGNORE_SUFFIX); tmp = caml_alloc_tuple(2); Store_field(tmp, 0, caml_copy_string(dict_entry->key)); Store_field(tmp, 1, caml_copy_string(dict_entry->value)); Store_field(ret, dict_pos, tmp); } av_dict_free(&dict); CAMLreturn(ret); break; default: caml_failwith("Invalid option type!"); } } CAMLprim value ocaml_avutil_av_opt_iter(value _cursor, value _class) { CAMLparam2(_cursor, _class); CAMLlocal4(_opt, _type, _tmp, _spec); int unimplement_option = 0; const AVClass *class; const struct AVOption *option; #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(56, 53, 100) const struct AVOption *cursor; #else void *cursor; #endif AVRational r; if (_cursor == Val_none) { cursor = NULL; option = NULL; class = AvClass_val(_class); } else { #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(56, 53, 100) cursor = AvOptions_val(Field(Some_val(_cursor), 0)); option = cursor; #else cursor = AvObj_val(Field(Field(Some_val(_cursor), 0), 0)); option = AvOptions_val(Field(Field(Some_val(_cursor), 0), 1)); #endif class = AvClass_val(Field(Some_val(_cursor), 1)); } if (class == NULL) CAMLreturn(Val_none); option = av_opt_next(&class, option); if (option == NULL) { do { class = #if LIBAVUTIL_VERSION_INT < AV_VERSION_INT(56, 53, 100) av_opt_child_class_next(AvClass_val(_class), class); #else av_opt_child_class_iterate(AvClass_val(_class), &cursor); #endif if (class == NULL) CAMLreturn(Val_none); option = av_opt_next(&class, option); } while (option == NULL); } _opt = caml_alloc_tuple(6); Store_field(_opt, 0, caml_copy_string(option->name)); if (option->help == NULL || strlen(option->help) == 0) Store_field(_opt, 1, Val_none); else { _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, caml_copy_string(option->help)); Store_field(_opt, 1, _tmp); } _spec = caml_alloc_tuple(3); _tmp = caml_alloc_tuple(1); Store_field(_spec, 0, Val_none); Store_field(_spec, 1, Val_none); Store_field(_spec, 2, Val_none); switch (option->type) { case AV_OPT_TYPE_CONST: _type = PVV_Constant; Store_field(_tmp, 0, value_of_avoptions(_cursor, option)); Store_field(_spec, 0, _tmp); break; case AV_OPT_TYPE_BOOL: _type = PVV_Bool; if (option->default_val.i64 >= 0) { Store_field(_tmp, 0, Val_bool(option->default_val.i64)); Store_field(_spec, 0, _tmp); } break; case AV_OPT_TYPE_CHANNEL_LAYOUT: _type = PVV_Channel_layout; if (av_get_channel_name(option->default_val.i64)) { Store_field(_tmp, 0, Val_ChannelLayout(option->default_val.i64)); Store_field(_spec, 0, _tmp); } break; case AV_OPT_TYPE_PIXEL_FMT: _type = PVV_Pixel_fmt; if 
(av_get_pix_fmt_name(option->default_val.i64)) { Store_field(_tmp, 0, Val_PixelFormat(option->default_val.i64)); Store_field(_spec, 0, _tmp); } break; case AV_OPT_TYPE_SAMPLE_FMT: _type = PVV_Sample_fmt; if (av_get_sample_fmt_name(option->default_val.i64)) { Store_field(_tmp, 0, Val_SampleFormat(option->default_val.i64)); Store_field(_spec, 0, _tmp); } break; case AV_OPT_TYPE_INT: _type = PVV_Int; Store_field(_tmp, 0, Val_int(option->default_val.i64)); Store_field(_spec, 0, _tmp); _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, Val_int((int)option->min)); Store_field(_spec, 1, _tmp); _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, Val_int((int)option->max)); Store_field(_spec, 2, _tmp); break; case AV_OPT_TYPE_FLAGS: _type = PVV_Flags; goto int64_opt; case AV_OPT_TYPE_INT64: _type = PVV_Int64; goto int64_opt; case AV_OPT_TYPE_UINT64: _type = PVV_UInt64; goto int64_opt; case AV_OPT_TYPE_DURATION: _type = PVV_Duration; int64_opt: Store_field(_tmp, 0, caml_copy_int64(option->default_val.i64)); Store_field(_spec, 0, _tmp); _tmp = caml_alloc_tuple(1); if (option->min <= INT64_MIN) Store_field(_tmp, 0, caml_copy_int64(INT64_MIN)); else if (option->min >= INT64_MAX) Store_field(_tmp, 0, caml_copy_int64(INT64_MAX)); else Store_field(_tmp, 0, caml_copy_int64((int64_t)option->min)); Store_field(_spec, 1, _tmp); _tmp = caml_alloc_tuple(1); if (option->max <= INT64_MIN) Store_field(_tmp, 0, caml_copy_int64(INT64_MIN)); else if (option->max >= INT64_MAX) Store_field(_tmp, 0, caml_copy_int64(INT64_MAX)); else Store_field(_tmp, 0, caml_copy_int64((int64_t)option->max)); Store_field(_spec, 2, _tmp); break; case AV_OPT_TYPE_DOUBLE: _type = PVV_Double; goto float_opt; case AV_OPT_TYPE_FLOAT: _type = PVV_Float; float_opt: Store_field(_tmp, 0, caml_copy_double(option->default_val.dbl)); Store_field(_spec, 0, _tmp); _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, caml_copy_double(option->min)); Store_field(_spec, 1, _tmp); _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, caml_copy_double(option->max)); Store_field(_spec, 2, _tmp); break; case AV_OPT_TYPE_RATIONAL: _type = PVV_Rational; Store_field(_spec, 0, _tmp); r = av_d2q(option->default_val.dbl, INT_MAX); value_of_rational(&r, &_tmp); Store_field(Field(_spec, 0), 0, _tmp); Store_field(_spec, 1, caml_alloc_tuple(1)); r = av_d2q(option->min, INT_MAX); value_of_rational(&r, &_tmp); Store_field(Field(_spec, 1), 0, _tmp); Store_field(_spec, 2, caml_alloc_tuple(1)); r = av_d2q(option->max, INT_MAX); value_of_rational(&r, &_tmp); Store_field(Field(_spec, 2), 0, _tmp); break; case AV_OPT_TYPE_COLOR: _type = PVV_Color; goto string_opt; case AV_OPT_TYPE_DICT: _type = PVV_Dict; goto string_opt; case AV_OPT_TYPE_IMAGE_SIZE: _type = PVV_Image_size; goto string_opt; case AV_OPT_TYPE_VIDEO_RATE: _type = PVV_Video_rate; goto string_opt; case AV_OPT_TYPE_BINARY: _type = PVV_Binary; goto string_opt; case AV_OPT_TYPE_STRING: _type = PVV_String; string_opt: if (option->default_val.str) { Store_field(_tmp, 0, caml_copy_string(option->default_val.str)); Store_field(_spec, 0, _tmp); } break; default: unimplement_option = 1; } _tmp = caml_alloc_tuple(2); Store_field(_tmp, 0, _type); Store_field(_tmp, 1, _spec); Store_field(_opt, 2, _tmp); Store_field(_opt, 3, Val_int(option->flags)); if (option->unit == NULL || strlen(option->unit) == 0) Store_field(_opt, 4, Val_none); else { _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, caml_copy_string(option->unit)); Store_field(_opt, 4, _tmp); } _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, caml_alloc_tuple(2)); #if 
LIBAVUTIL_VERSION_INT < AV_VERSION_INT(56, 53, 100) Store_field(Field(_tmp, 0), 0, value_of_avoptions(_cursor, option)); #else Store_field(Field(_tmp, 0), 0, caml_alloc_tuple(2)); Store_field(Field(Field(_tmp, 0), 0), 0, value_of_avobj(_cursor, cursor)); Store_field(Field(Field(_tmp, 0), 0), 1, value_of_avoptions(_cursor, option)); #endif Store_field(Field(_tmp, 0), 1, value_of_avclass(_class, class)); Store_field(_opt, 5, _tmp); if (unimplement_option) caml_raise_with_arg(*caml_named_value("av_opt_iter_not_implemented"), Field(_opt, 5)); _tmp = caml_alloc_tuple(1); Store_field(_tmp, 0, _opt); CAMLreturn(_tmp); } CAMLprim value ocaml_avutil_avopt_default_int64(value _opt) { CAMLparam0(); CAMLreturn(caml_copy_int64(AvOptions_val(_opt)->default_val.i64)); } CAMLprim value ocaml_avutil_avopt_default_double(value _opt) { CAMLparam0(); CAMLreturn(caml_copy_double(AvOptions_val(_opt)->default_val.dbl)); } CAMLprim value ocaml_avutil_avopt_default_string(value _opt) { CAMLparam0(); CAMLreturn(caml_copy_string(AvOptions_val(_opt)->default_val.str)); } CAMLprim value ocaml_avutil_av_opt_int_of_flag(value _flag) { CAMLparam0(); switch (_flag) { case PVV_Encoding_param: CAMLreturn(Val_int(AV_OPT_FLAG_ENCODING_PARAM)); case PVV_Decoding_param: CAMLreturn(Val_int(AV_OPT_FLAG_DECODING_PARAM)); case PVV_Audio_param: CAMLreturn(Val_int(AV_OPT_FLAG_AUDIO_PARAM)); case PVV_Video_param: CAMLreturn(Val_int(AV_OPT_FLAG_VIDEO_PARAM)); case PVV_Subtitle_param: CAMLreturn(Val_int(AV_OPT_FLAG_SUBTITLE_PARAM)); case PVV_Export: CAMLreturn(Val_int(AV_OPT_FLAG_EXPORT)); case PVV_Readonly: CAMLreturn(Val_int(AV_OPT_FLAG_READONLY)); case PVV_Bsf_param: #ifdef AV_OPT_FLAG_BSF_PARAM CAMLreturn(Val_int(AV_OPT_FLAG_BSF_PARAM)); #else CAMLreturn(Val_int(0)); #endif case PVV_Filtering_param: CAMLreturn(Val_int(AV_OPT_FLAG_FILTERING_PARAM)); case PVV_Deprecated: #ifdef AV_OPT_FLAG_DEPRECATED CAMLreturn(Val_int(AV_OPT_FLAG_DEPRECATED)); #else CAMLreturn(Val_int(0)); #endif case PVV_Child_consts: #ifdef AV_OPT_FLAG_AV_OPT_FLAG_CHILD_CONSTS CAMLreturn(Val_int(AV_OPT_FLAG_CHILD_CONSTS)); #else CAMLreturn(Val_int(0)); #endif case PVV_Runtime_param: #ifdef AV_OPT_FLAG_RUNTIME_PARAM CAMLreturn(Val_int(AV_OPT_FLAG_RUNTIME_PARAM)); #else CAMLreturn(Val_int(0)); #endif default: caml_failwith("Invalid option flag!"); } } static void finalize_buffer_ref(value v) { av_buffer_unref(&BufferRef_val(v)); } static struct custom_operations buffer_ref_ops = { "ocaml_avutil_buffer_ref", finalize_buffer_ref, custom_compare_default, custom_hash_default, custom_serialize_default, custom_deserialize_default}; CAMLprim value ocaml_avutil_create_device_context(value _device_type, value _name, value _opts) { CAMLparam2(_name, _opts); CAMLlocal3(ret, ans, unused); AVBufferRef *hw_device_ctx = NULL; AVDictionary *options = NULL; const char *name; char *key, *val; int len = Wosize_val(_opts); int i, err, count; if (caml_string_length(_name) > 0) { name = String_val(_name); } else { name = NULL; } for (i = 0; i < len; i++) { // Dictionaries copy key/values by default! 
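/* av_dict_set() is called with flags = 0, so FFmpeg strdup's both key and
   value; the pointers into the OCaml heap obtained below only need to stay
   valid until that call returns. */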
key = (char *)Bytes_val(Field(Field(_opts, i), 0)); val = (char *)Bytes_val(Field(Field(_opts, i), 1)); err = av_dict_set(&options, key, val, 0); if (err < 0) { av_dict_free(&options); ocaml_avutil_raise_error(err); } } caml_release_runtime_system(); err = av_hwdevice_ctx_create(&hw_device_ctx, HwDeviceType_val(_device_type), name, options, 0); caml_acquire_runtime_system(); if (err < 0) { char errbuf[AV_ERROR_MAX_STRING_SIZE] = ""; printf( "failed with error: %s\n", av_make_error_string(errbuf, AV_ERROR_MAX_STRING_SIZE, AVERROR(err))); fflush(stdout); av_dict_free(&options); ocaml_avutil_raise_error(err); } // Return unused keys count = av_dict_count(options); unused = caml_alloc_tuple(count); AVDictionaryEntry *entry = NULL; for (i = 0; i < count; i++) { entry = av_dict_get(options, "", entry, AV_DICT_IGNORE_SUFFIX); Store_field(unused, i, caml_copy_string(entry->key)); } av_dict_free(&options); ans = caml_alloc_custom(&buffer_ref_ops, sizeof(AVBufferRef *), 0, 1); BufferRef_val(ans) = hw_device_ctx; ret = caml_alloc_tuple(2); Store_field(ret, 0, ans); Store_field(ret, 1, unused); CAMLreturn(ret); } CAMLprim value ocaml_avutil_create_frame_context(value _width, value _height, value _src_pixel_format, value _dst_pixel_format, value _device_ctx) { CAMLparam1(_device_ctx); CAMLlocal1(ans); AVBufferRef *hw_frames_ref; AVHWFramesContext *frames_ctx = NULL; int ret; hw_frames_ref = av_hwframe_ctx_alloc(BufferRef_val(_device_ctx)); if (!hw_frames_ref) caml_raise_out_of_memory(); frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data); frames_ctx->format = PixelFormat_val(_dst_pixel_format); frames_ctx->sw_format = PixelFormat_val(_src_pixel_format); frames_ctx->width = Int_val(_width); frames_ctx->height = Int_val(_height); caml_release_runtime_system(); ret = av_hwframe_ctx_init(hw_frames_ref); caml_acquire_runtime_system(); if (ret < 0) { av_buffer_unref(&hw_frames_ref); ocaml_avutil_raise_error(ret); } ans = caml_alloc_custom(&buffer_ref_ops, sizeof(AVBufferRef *), 0, 1); BufferRef_val(ans) = hw_frames_ref; CAMLreturn(ans); } ocaml-ffmpeg-1.1.11/avutil/avutil_stubs.h000066400000000000000000000067411457634536500203630ustar00rootroot00000000000000#ifndef _AVUTIL_STUBS_H_ #define _AVUTIL_STUBS_H_ #include #include #include #include #include #include #include #include #include "polymorphic_variant_values_stubs.h" #define Val_none Val_int(0) #ifndef Some_val #define Some_val(v) Field(v,0) #endif #define ERROR_MSG_SIZE 256 #define EXN_ERROR "ffmpeg_exn_error" #define Fail(...) 
{ \ snprintf(ocaml_av_exn_msg, ERROR_MSG_SIZE, __VA_ARGS__); \ caml_callback(*caml_named_value("ffmpeg_exn_failure"), caml_copy_string(ocaml_av_exn_msg)); \ } void ocaml_avutil_raise_error(int err); extern char ocaml_av_exn_msg[]; #define List_init(list) (list) = Val_emptylist #define List_add(list, cons, val) { \ (cons) = caml_alloc(2, 0); \ Store_field((cons), 0, (val)); \ Store_field((cons), 1, (list)); \ (list) = (cons); \ } /***** Global initialisation *****/ void ocaml_ffmpeg_register_thread(); /**** AVRational ****/ #define rational_of_value(v) ((AVRational){Int_val(Field((v), 0)), Int_val(Field((v), 1))}) void value_of_rational(const AVRational * r, value * pv); /**** Time format ****/ int64_t second_fractions_of_time_format(value time_format); /**** Channel layout ****/ uint64_t ChannelLayout_val(value v); value Val_ChannelLayout(uint64_t cl); /**** Sample format ****/ #define Sample_format_val(v) (Int_val(v)) enum AVSampleFormat SampleFormat_val(value v); enum AVSampleFormat AVSampleFormat_of_Sample_format(int i); value Val_SampleFormat(enum AVSampleFormat sf); enum caml_ba_kind bigarray_kind_of_AVSampleFormat(enum AVSampleFormat sf); /**** Pixel format ****/ int PixelFormat_val(value); value Val_PixelFormat(enum AVPixelFormat pf); /**** Buffer Ref ****/ #define BufferRef_val(v) (*(AVBufferRef **)Data_custom_val(v)) /**** Device Type ****/ enum AVHWDeviceType HwDeviceType_val(value v); value Val_HwDeviceType(enum AVHWDeviceType t); /***** AVFrame *****/ #define Frame_val(v) (*(struct AVFrame**)Data_custom_val(v)) value value_of_frame(AVFrame *frame); /***** AVSubtitle *****/ #define Subtitle_val(v) (*(struct AVSubtitle**)Data_custom_val(v)) value value_of_subtitle(AVSubtitle *subtitle); int subtitle_header_default(AVCodecContext *avctx); /***** AVPixelFormat *****/ #define AvPixFmtDescriptor_val(v) (*(const AVPixFmtDescriptor**)Data_abstract_val(v)) static inline value value_of_avpixfmtdescriptor(value ret, const AVPixFmtDescriptor *avpixfmtdescriptor) { ret = caml_alloc(1, Abstract_tag); AvPixFmtDescriptor_val(ret) = avpixfmtdescriptor; return ret; } /****** AVOptions ******/ #define AvClass_val(v) (*(const AVClass**)Data_abstract_val(v)) static inline value value_of_avclass(value ret, const AVClass *avclass) { ret = caml_alloc(1, Abstract_tag); AvClass_val(ret) = avclass; return ret; } #define AvOptions_val(v) (*(const struct AVOption**)Data_abstract_val(v)) static inline value value_of_avoptions(value ret, const struct AVOption *avoptions) { ret = caml_alloc(1, Abstract_tag); AvOptions_val(ret) = avoptions; return ret; } #define AvObj_val(v) (*(void**)Data_abstract_val(v)) static inline value value_of_avobj(value ret, void *avobj) { ret = caml_alloc(1, Abstract_tag); AvObj_val(ret) = avobj; return ret; } #endif // _AVUTIL_STUBS_H_ ocaml-ffmpeg-1.1.11/avutil/config/000077500000000000000000000000001457634536500167235ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/avutil/config/discover.ml000066400000000000000000000013621457634536500210750ustar00rootroot00000000000000module C = Configurator.V1 let () = C.main ~name:"ffmpeg-avutil-pkg-config" (fun c -> let default : C.Pkg_config.package_conf = { libs = ["-lavutil"; "-lavcodec"]; cflags = [] } in let conf = match C.Pkg_config.get c with | None -> default | Some pc -> ( match C.Pkg_config.query_expr_err pc ~package:"libavutil libavcodec" ~expr:"libavutil >= 55.78.100, libavcodec >= 57.107.100" with | Error msg -> failwith msg | Ok deps -> deps) in C.Flags.write_sexp "c_flags.sexp" conf.cflags; C.Flags.write_lines "c_flags" 
conf.cflags; C.Flags.write_sexp "c_library_flags.sexp" conf.libs) ocaml-ffmpeg-1.1.11/avutil/config/dune000066400000000000000000000000751457634536500176030ustar00rootroot00000000000000(executable (name discover) (libraries dune.configurator)) ocaml-ffmpeg-1.1.11/avutil/dune000066400000000000000000000054371457634536500163450ustar00rootroot00000000000000(library (name avutil) (public_name ffmpeg-avutil) (synopsis "Bindings to ffmpeg's avutil library") (foreign_stubs (language c) (names avutil_stubs) (flags (:include c_flags.sexp))) (c_library_flags (:include c_library_flags.sexp)) (install_c_headers avutil_stubs polymorphic_variant_values_stubs media_types_stubs) (libraries threads)) (rule (targets c_flags c_flags.sexp c_library_flags.sexp) (action (run ./config/discover.exe))) (rule (targets avutil_stubs.c) (mode fallback) (deps polymorphic_variant_values_stubs.h pixel_format_stubs.h media_types_stubs.h hw_device_type_stubs.h pixel_format_flag_stubs.h sample_format_stubs.h channel_layout_stubs.h) (action (echo "this should not happen"))) (rule (targets hw_device_type_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" hw_device_type h %{read-lines:c_flags}))) (rule (targets hw_device_type.ml) (deps hw_device_type_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" hw_device_type ml %{read-lines:c_flags}))) (rule (targets media_types_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" media_types h %{read-lines:c_flags}))) (rule (targets media_types.ml) (deps media_types_stubs.h c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" media_types ml %{read-lines:c_flags}))) (rule (targets polymorphic_variant_values_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" polymorphic_variant h %{read-lines:c_flags}))) (rule (targets pixel_format_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" pixel_format h %{read-lines:c_flags}))) (rule (targets pixel_format.ml) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" pixel_format ml %{read-lines:c_flags}))) (rule (targets pixel_format_flag_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" pixel_format_flag h %{read-lines:c_flags}))) (rule (targets pixel_format_flag.ml) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" pixel_format_flag ml %{read-lines:c_flags}))) (rule (targets sample_format_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" sample_format h %{read-lines:c_flags}))) (rule (targets sample_format.ml) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" sample_format ml %{read-lines:c_flags}))) (rule (targets channel_layout_stubs.h) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" channel_layout h %{read-lines:c_flags}))) (rule (targets channel_layout.ml) (deps c_flags) (action (run ../gen_code/gen_code.exe "%{cc}" channel_layout ml %{read-lines:c_flags}))) ocaml-ffmpeg-1.1.11/dune-project000066400000000000000000000050621457634536500164770ustar00rootroot00000000000000(lang dune 3.6) (name ffmpeg) (version 1.1.11) (source (github savonet/ocaml-ffmpeg)) (license LGPL-2.1-only) (authors "The Savonet Team ") (maintainers "Romain Beauxis ") (generate_opam_files true) (use_standard_c_and_cxx_flags false) (package (name ffmpeg-avutil) (synopsis "Bindings for the ffmpeg avutil libraries") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) base-threads) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-avcodec) (synopsis 
"Bindings for the ffmpeg avcodec library") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-avutil (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-avfilter) (synopsis "Bindings for the ffmpeg avfilter library") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-avutil (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-swscale) (synopsis "Bindings for the ffmpeg swscale library") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-avutil (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-swresample) (synopsis "Bindings for the ffmpeg swresample library") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-avutil (= :version)) (ffmpeg-avcodec (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-av) (synopsis "Bindings for the ffmpeg libraries -- top-level helpers") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-avutil (= :version)) (ffmpeg-avcodec (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg-avdevice) (synopsis "Bindings for the ffmpeg avdevice library") (depends (conf-pkg-config :build) (conf-ffmpeg :build) (ocaml (>= 4.08.0)) dune (dune-configurator :build) (ffmpeg-av (= :version))) (conflicts (ffmpeg (< 0.5.0))) ) (package (name ffmpeg) (synopsis "Bindings for the ffmpeg libraries") (depends (ocaml (>= 4.08.0)) dune (ffmpeg-av (= :version)) (ffmpeg-avutil (= :version)) (ffmpeg-avcodec (= :version)) (ffmpeg-avfilter (= :version)) (ffmpeg-avdevice (= :version)) (ffmpeg-swscale (= :version)) (ffmpeg-swresample (= :version))) ) ocaml-ffmpeg-1.1.11/examples/000077500000000000000000000000001457634536500157705ustar00rootroot00000000000000ocaml-ffmpeg-1.1.11/examples/all_bitstream_filters.ml000066400000000000000000000067041457634536500227030ustar00rootroot00000000000000let string_of_spec to_string { Avutil.Options.default; min; max; values } = let opt_str = function None -> "none" | Some v -> to_string v in Printf.sprintf "{default: %s, min: %s, max: %s, values: %s}" (opt_str default) (opt_str min) (opt_str max) (Printf.sprintf "[%s]" (String.concat ", " (List.map (fun (name, v) -> Printf.sprintf "%s: %s" name (to_string v)) values))) let string_of_flags flags = let string_of_flag = function | `Encoding_param -> "encoding param" | `Decoding_param -> "decoding param" | `Audio_param -> "audio param" | `Video_param -> "video param" | `Subtitle_param -> "subtitle param" | `Export -> "export" | `Readonly -> "readonly" | `Bsf_param -> "bsf param" | `Runtime_param -> "runtime param" | `Filtering_param -> "filtering param" | `Deprecated -> "deprecated" | `Child_consts -> "child constants" in String.concat ", " (List.map string_of_flag flags) let string_of_option { Avutil.Options.name; help; flags; spec } = let _type, spec = match spec with | `Flags entry -> ("flags", string_of_spec Int64.to_string entry) | `Int entry -> ("int", string_of_spec string_of_int entry) | `Int64 entry -> ("int64", string_of_spec Int64.to_string entry) | `Float entry -> ("float", string_of_spec string_of_float entry) | `Double entry -> ("double", string_of_spec string_of_float entry) | `String entry -> ("string", string_of_spec (fun v -> v) entry) | `Rational entry -> ( "rational", string_of_spec (fun { 
Avutil.num; den } -> Printf.sprintf "%d/%d" num den) entry ) | `Binary entry -> ("binary", string_of_spec (fun v -> v) entry) | `Dict entry -> ("dict", string_of_spec (fun v -> v) entry) | `UInt64 entry -> ("uint64", string_of_spec Int64.to_string entry) | `Image_size entry -> ("image_size", string_of_spec (fun v -> v) entry) | `Pixel_fmt entry -> ( "pixel_fmt", string_of_spec (fun p -> match Avutil.Pixel_format.to_string p with | None -> "none" | Some f -> f) entry ) | `Sample_fmt entry -> ( "sample_fmt", string_of_spec (fun f -> match Avutil.Sample_format.get_name f with | None -> "none" | Some f -> f) entry ) | `Video_rate entry -> ("video_rate", string_of_spec (fun v -> v) entry) | `Duration entry -> ("duration", string_of_spec Int64.to_string entry) | `Color entry -> ("color", string_of_spec (fun v -> v) entry) | `Channel_layout entry -> ( "channel_layout", string_of_spec Avutil.Channel_layout.get_description entry ) | `Bool entry -> ("bool", string_of_spec string_of_bool entry) in Printf.sprintf "- %s:\n type: %s\n help: %s\n flags: %s\n spec: %s" name _type (match help with None -> "none" | Some v -> v) (string_of_flags flags) spec let () = Printf.printf "====== Bitstream Filters ======\n%!"; List.iter (fun { Avcodec.BitstreamFilter.name; codecs; options } -> Printf.printf "%s:\n Codecs: %s\n Options:\n%s\n\n" name (String.concat ", " (List.map Avcodec.string_of_id codecs)) (String.concat "\n" (List.map string_of_option (Avutil.Options.opts options)))) Avcodec.BitstreamFilter.filters ocaml-ffmpeg-1.1.11/examples/all_codecs.ml000066400000000000000000000062231457634536500204150ustar00rootroot00000000000000let string_of_properties = function | `Intra_only -> "Intra only" | `Lossy -> "Lossy" | `Lossless -> "Lossless" | `Reorder -> "Reorder" | `Bitmap_sub -> "Bitmap sub" | `Text_sub -> "Text sub" | `Fields -> "fields" let string_of_media_type = function | `Unknown -> "unknown" | `Video -> "video" | `Audio -> "audio" | `Data -> "data" | `Subtitle -> "subtitle" | `Attachment -> "attachment" let print_descriptor = function | None -> "(none)\n" | Some { Avcodec.media_type; name; long_name; mime_types; properties; profiles } -> Printf.sprintf "\n\ \ Name: %s\n\ \ Media type: %s\n\ \ Long name: %s\n\ \ Mime types: %s\n\ \ Properties: %s\n\ \ Profiles:%s\n" name (string_of_media_type media_type) (Option.value ~default:"(none)" long_name) (String.concat ", " mime_types) (String.concat ", " (List.map string_of_properties properties)) (String.concat "" (List.map (fun { Avcodec.id; profile_name } -> Printf.sprintf "\n ID: %i, name: %s" id profile_name) profiles)) let () = Printf.printf "====== Audio ======\n%!"; List.iter (fun id -> Printf.printf "Available audio codec: %s\nDescriptor:%s\n" (Avcodec.Audio.string_of_id id) (print_descriptor (Avcodec.Audio.descriptor id))) Avcodec.Audio.codec_ids; List.iter (fun c -> Printf.printf "Available audio encoder: %s - %s\n%!" (Avcodec.Audio.get_name c) (Avcodec.Audio.get_description c)) Avcodec.Audio.encoders; List.iter (fun c -> Printf.printf "Available audio decoder: %s - %s\n%!" (Avcodec.Audio.get_name c) (Avcodec.Audio.get_description c)) Avcodec.Audio.decoders; Printf.printf "\n\n"; Printf.printf "====== Video ======\n%!"; List.iter (fun id -> Printf.printf "Available video codec: %s\nDescriptor:%s\n" (Avcodec.Video.string_of_id id) (print_descriptor (Avcodec.Video.descriptor id))) Avcodec.Video.codec_ids; List.iter (fun c -> Printf.printf "Available video encoder: %s - %s\n%!" 
(Avcodec.Video.get_name c) (Avcodec.Video.get_description c)) Avcodec.Video.encoders; List.iter (fun c -> Printf.printf "Available video decoder: %s - %s\n%!" (Avcodec.Video.get_name c) (Avcodec.Video.get_description c)) Avcodec.Video.decoders; Printf.printf "\n\n"; Printf.printf "====== Subtitle ======\n%!"; List.iter (fun id -> Printf.printf "Available subtitle codec: %s\nDescriptor:%s\n" (Avcodec.Subtitle.string_of_id id) (print_descriptor (Avcodec.Subtitle.descriptor id))) Avcodec.Subtitle.codec_ids; List.iter (fun c -> Printf.printf "Available subtitle encoder: %s - %s\n%!" (Avcodec.Subtitle.get_name c) (Avcodec.Subtitle.get_description c)) Avcodec.Subtitle.encoders; List.iter (fun c -> Printf.printf "Available subtitle decoder: %s - %s\n%!" (Avcodec.Subtitle.get_name c) (Avcodec.Subtitle.get_description c)) Avcodec.Subtitle.decoders ocaml-ffmpeg-1.1.11/examples/aresample.ml000066400000000000000000000073771457634536500203110ustar00rootroot00000000000000let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 3 then ( Printf.printf "usage: %s input_file output_file\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let src = Av.open_input Sys.argv.(1) in let dst = Av.open_output Sys.argv.(2) in let audio_codec = Avcodec.Audio.find_encoder_by_name "ac3" in let audio_params, audio_input, idx, oass = Av.find_best_audio_stream src |> fun (i, audio_input, params) -> let channel_layout = Avcodec.Audio.get_channel_layout params in let channels = Avcodec.Audio.get_nb_channels params in let sample_format = Avcodec.Audio.get_sample_format params in let sample_rate = Avcodec.Audio.get_sample_rate params in let time_base = { Avutil.num = 1; den = sample_rate } in ( params, audio_input, i, Av.new_audio_stream ~channels ~channel_layout ~sample_format ~sample_rate ~time_base ~codec:audio_codec dst ) in let frame_size = if List.mem `Variable_frame_size (Avcodec.capabilities audio_codec) then 512 else Av.get_frame_size oass in let filter = let config = Avfilter.init () in let abuffer = let time_base = Av.get_time_base audio_input in let sample_rate = Avcodec.Audio.get_sample_rate audio_params in let sample_format = Avutil.Sample_format.get_id (Avcodec.Audio.get_sample_format audio_params) in let channels = Avcodec.Audio.get_nb_channels audio_params in let channel_layout = Avutil.Channel_layout.get_id (Avcodec.Audio.get_channel_layout audio_params) in let args = [ `Pair ("time_base", `Rational time_base); `Pair ("sample_rate", `Int sample_rate); `Pair ("sample_fmt", `Int sample_format); `Pair ("channels", `Int channels); `Pair ("channel_layout", `Int64 channel_layout); ] in { Avfilter.node_name = "in"; node_args = Some args; node_pad = List.hd Avfilter.(abuffer.io.outputs.audio); } in let outputs = { Avfilter.audio = [abuffer]; video = [] } in let sink = { Avfilter.node_name = "out"; node_args = None; node_pad = List.hd Avfilter.(abuffersink.io.inputs.audio); } in let inputs = { Avfilter.audio = [sink]; video = [] } in let _ = Avfilter.parse { inputs; outputs } "aresample=22050,aformat=channel_layouts=stereo" config in Avfilter.launch config in let _, output = List.hd Avfilter.(filter.outputs.audio) in let context = output.context in Avfilter.set_frame_size context frame_size; let time_base = Avfilter.time_base context in Printf.printf "Sink info:\n\ time_base: %d/%d\n\ channels: %d\n\ channel_layout: %s\n\ sample_rate: %d\n" time_base.Avutil.num time_base.Avutil.den (Avfilter.channels context) (Avutil.Channel_layout.get_description 
(Avfilter.channel_layout context)) (Avfilter.sample_rate context); let process_audio i frm = try assert (i = idx); let _, input = List.hd Avfilter.(filter.inputs.audio) in input frm; let rec flush () = try Av.write_frame oass (output.handler ()); flush () with Avutil.Error `Eagain -> () in flush () with Not_found -> () in Gc.full_major (); Gc.full_major (); let rec f () = match Av.read_input ~audio_frame:[audio_input] src with | `Audio_frame (i, frame) -> process_audio i (`Frame frame); f () | exception Avutil.Error `Eof -> ( try process_audio idx `Flush with Avutil.Error `Eof -> ()) | _ -> f () in f (); Av.close src; Av.close dst; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/audio_decoding.ml000066400000000000000000000033061457634536500212610ustar00rootroot00000000000000open Avutil module FrameToS32Bytes = Swresample.Make (Swresample.Frame) (Swresample.S32Bytes) let () = if Array.length Sys.argv < 4 then ( Printf.eprintf " usage: %s input_file format audio_output_file\n\ \ API example program to show how to read audio frames from an \ input file.\n\ \ This program reads best audio frames from a file, decodes them, \ and writes decoded\n\ \ audio frames to a rawaudio file named audio_output_file.\n" Sys.argv.(0); exit 1); Log.set_level `Debug; Log.set_callback print_string; let audio_output_filename = Sys.argv.(3) ^ ".raw" in let audio_output_file = open_out_bin audio_output_filename in let format = match Av.Format.find_input_format Sys.argv.(2) with | Some f -> f | None -> failwith ("Could not find format: " ^ Sys.argv.(2)) in let input = Av.open_input ~format Sys.argv.(1) in let idx, istream, icodec = Av.find_best_audio_stream input in let options = [`Engine_soxr] in let rsp = FrameToS32Bytes.from_codec ~options icodec `Stereo 44100 in let rec f () = match Av.read_input ~audio_frame:[istream] input with | `Audio_frame (i, frame) when i = idx -> FrameToS32Bytes.convert rsp frame |> output_bytes audio_output_file; f () | exception Avutil.Error `Eof -> () | _ -> f () in f (); Av.get_input istream |> Av.close; close_out audio_output_file; Printf.printf "Play the output audio file with the command:\n\ ffplay -f %s -ac 2 -ar 44100 %s\n" (Option.get (Sample_format.get_name `S32) ^ if Sys.big_endian then "be" else "le") audio_output_filename; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/audio_device.ml000066400000000000000000000045701457634536500207500ustar00rootroot00000000000000let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 2 then ( Printf.( Av.Format.( printf "\ninput devices :\n"; Avdevice.get_audio_input_formats () |> List.iter (fun d -> printf "\t%s (%s)\n" (get_input_name d) (get_input_long_name d)); printf "\noutput devices :\n"; Avdevice.get_audio_output_formats () |> List.iter (fun d -> printf "\t%s (%s)\n" (get_output_name d) (get_output_long_name d)); printf "\n\ usage: %s input [output]\n\ input and output can be devices or file names\n" Sys.argv.(0); exit 0))); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let src = try Avdevice.open_audio_input Sys.argv.(1) with Avutil.Error _ -> Av.open_input Sys.argv.(1) in let idx, ias, params = Av.find_best_audio_stream src in let codec = Avcodec.Audio.find_encoder_by_name "flac" in let channel_layout = Avcodec.Audio.get_channel_layout params in let channels = Avcodec.Audio.get_nb_channels params in let sample_format = Avcodec.Audio.get_sample_format params in let sample_rate = Avcodec.Audio.get_sample_rate params in let time_base = { Avutil.num = 1; den = 
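(* Using 1/sample_rate as the stream time base means frame timestamps on the
   output stream are expressed in samples. *)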
sample_rate } in let dst = try if Array.length Sys.argv < 3 then Avdevice.open_default_audio_output () else Avdevice.open_audio_output Sys.argv.(2) with Avutil.Error _ -> Av.open_output Sys.argv.(2) in let dst_stream = Av.new_audio_stream ~channels ~channel_layout ~sample_format ~sample_rate ~time_base ~codec dst in Avdevice.Dev_to_app.( set_control_message_callback (function | Volume_level_changed v -> Printf.printf "Volume level changed to %f %%\n" (v *. 100.) | _ -> print_endline "Unexpected dev to app control message")) dst; (try Avdevice.App_to_dev.(control_messages [Get_volume; Set_volume 0.3]) dst with Avutil.Error err -> prerr_endline (Avutil.string_of_error err)); let rec run n = if n > 0 then ( try (match Av.read_input src ~audio_frame:[ias] with | `Audio_frame (i, frame) when i = idx -> Av.write_frame dst_stream frame | _ -> assert false); run (n - 1) with Avutil.Error `Eof -> ()) in run 500; Av.close src; Av.close dst; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/decode_audio.ml000066400000000000000000000061611457634536500207320ustar00rootroot00000000000000open Avcodec let () = Printexc.record_backtrace true module Compat = struct let map_file _ _ _ _ _ = assert false let () = ignore map_file include Bigarray.Genarray include Unix end let () = if Array.length Sys.argv < 5 then ( Printf.eprintf " usage: %s <input file> <input codec> <output file> <output codec>\n\ \ API example program to show how to read audio frames from an \ input file.\n\ \ This program parses data into packets from a file, decodes them, and \ writes decoded\n\ \ audio frames to an audio file named <output file>.\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let in_codec = Audio.find_decoder_by_name Sys.argv.(2) in let parser = Audio.create_parser in_codec in let decoder = Audio.create_decoder in_codec in let in_fd = Unix.openfile Sys.argv.(1) [Unix.O_RDONLY] 0 in let out_file = Av.open_output Sys.argv.(3) in let codec = Avcodec.Audio.find_encoder_by_name Sys.argv.(4) in let channel_layout = Avcodec.Audio.find_best_channel_layout codec `Stereo in let sample_format = Avcodec.Audio.find_best_sample_format codec `Dbl in let sample_rate = Avcodec.Audio.find_best_sample_rate codec 44100 in let time_base = { Avutil.num = 1; den = sample_rate } in let out_stream = Av.new_audio_stream ~channel_layout ~sample_format ~sample_rate ~time_base ~codec out_file in let filter = ref None in let get_filter frame = match !filter with | Some f -> f | None -> let in_params = { Avfilter.Utils.sample_rate = Avutil.Audio.frame_get_sample_rate frame; channel_layout = Avutil.Audio.frame_get_channel_layout frame; sample_format = Avutil.Audio.frame_get_sample_format frame; } in let in_time_base = { Avutil.num = 1; den = sample_rate } in let out_frame_size = if List.mem `Variable_frame_size (Avcodec.capabilities codec) then 512 else Av.get_frame_size out_stream in let out_params = { Avfilter.Utils.sample_rate; sample_format; channel_layout } in let f = Avfilter.Utils.init_audio_converter ~in_params ~in_time_base ~out_params ~out_frame_size () in filter := Some f; f in let pts = ref 0L in let on_frame frame = Avutil.Frame.set_pts frame (Some !pts); pts := Int64.add !pts (Int64.of_int (Avutil.Audio.frame_nb_samples frame)); Av.write_frame out_stream frame in let write_frame frame = let filter = get_filter frame in Avfilter.Utils.convert_audio filter on_frame (`Frame frame) in Compat.map_file in_fd Bigarray.Int8_unsigned Bigarray.c_layout false [| -1 |] |> Bigarray.array1_of_genarray |> Packet.parse_data parser @@ Avcodec.decode decoder @@ 
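(* The memory-mapped input is parsed into packets, each packet is decoded, and every decoded frame is passed to write_frame, which resamples it through the filter graph and encodes it into the output container. *)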
write_frame; Avcodec.flush_decoder decoder @@ write_frame; Avfilter.Utils.convert_audio (Option.get !filter) on_frame `Flush; Unix.close in_fd; Av.close out_file; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/decode_stream.ml000066400000000000000000000072711457634536500211270ustar00rootroot00000000000000let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 4 then ( Printf.eprintf " usage: %s <input file> <output file> <output codec>\n\ \ API example program to show how to read audio frames from an \ input file using the streaming API.\n\ \ This program parses data into packets from a file, decodes them, and \ writes decoded\n\ \ audio frames to an audio file named <output file>.\n" Sys.argv.(0); exit 1); Avutil.Log.set_callback (fun _ -> ()); let in_fd = Unix.openfile Sys.argv.(1) [Unix.O_RDONLY] 0 in let out_file = Av.open_output Sys.argv.(2) in let codec = Avcodec.Audio.find_encoder_by_name Sys.argv.(3) in let channel_layout = Avcodec.Audio.find_best_channel_layout codec `Stereo in let sample_format = Avcodec.Audio.find_best_sample_format codec `Dbl in let sample_rate = Avcodec.Audio.find_best_sample_rate codec 44100 in let time_base = { Avutil.num = 1; den = sample_rate } in let out_stream = Av.new_audio_stream ~channel_layout ~sample_format ~sample_rate ~time_base ~codec out_file in let filter = ref None in let get_filter frame = match !filter with | Some f -> f | None -> let in_params = { Avfilter.Utils.sample_rate = Avutil.Audio.frame_get_sample_rate frame; channel_layout = Avutil.Audio.frame_get_channel_layout frame; sample_format = Avutil.Audio.frame_get_sample_format frame; } in let in_time_base = { Avutil.num = 1; den = sample_rate } in let out_frame_size = if List.mem `Variable_frame_size (Avcodec.capabilities codec) then 512 else Av.get_frame_size out_stream in let out_params = { Avfilter.Utils.sample_rate; sample_format; channel_layout } in let f = Avfilter.Utils.init_audio_converter ~in_params ~in_time_base ~out_params ~out_frame_size () in filter := Some f; f in let pts = ref 0L in let on_frame frame = Avutil.Frame.set_pts frame (Some !pts); pts := Int64.add !pts (Int64.of_int (Avutil.Audio.frame_nb_samples frame)); Av.write_frame out_stream frame in let write_frame frame = let filter = get_filter frame in Avfilter.Utils.convert_audio filter on_frame (`Frame frame) in let read = Unix.read in_fd in let seek = Unix.lseek in_fd in let container = Av.open_input_stream ~seek read in let idx, stream, params = Av.find_best_audio_stream container in let codec_id = Avcodec.Audio.get_params_id params in let sample_format = match Avcodec.Audio.get_sample_format params with | `Dbl -> "dbl" | `Dblp -> "dblp" | `Flt -> "flt" | `Fltp -> "fltp" | `None -> "none" | `S16 -> "s16" | `S16p -> "s16p" | `S32 -> "s32" | `S32p -> "s32p" | `S64 -> "s64" | `S64p -> "s64p" | `U8 -> "u8" | `U8p -> "u8p" in Printf.printf "Detected format: %s, %dHz, %d channels, %s\n%!" 
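(* These values come from the codec parameters of the best audio stream found in the input container. *)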
(Avcodec.Audio.string_of_id codec_id) (Avcodec.Audio.get_sample_rate params) (Avcodec.Audio.get_nb_channels params) sample_format; let rec f () = try (match Av.read_input container ~audio_frame:[stream] with | `Audio_frame (i, frame) when i = idx -> write_frame frame | _ -> assert false); f () with | Avutil.Error `Eof -> Avfilter.Utils.convert_audio (Option.get !filter) on_frame `Flush | Avutil.Error `Invalid_data -> f () in f (); Unix.close in_fd; Av.close container; Av.close out_file; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/decoding.ml000066400000000000000000000027051457634536500201020ustar00rootroot00000000000000let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 2 then ( Printf.printf "usage: %s input_file\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let src = Av.open_input Sys.argv.(1) in let mk_audio_decoder (pos, _, params) = ( pos, Avcodec.Audio.create_decoder ~params Avcodec.Audio.(find_decoder (get_params_id params)) ) in let iass = Av.get_audio_streams src in let dass = List.map mk_audio_decoder iass in let mk_video_decoder (pos, _, params) = ( pos, Avcodec.Video.create_decoder ~params Avcodec.Video.(find_decoder (get_params_id params)) ) in let ivss = Av.get_video_streams src in let dvss = List.map mk_video_decoder ivss in let rec f () = match Av.read_input ~audio_packet:(List.map (fun (_, s, _) -> s) iass) ~video_packet:(List.map (fun (_, s, _) -> s) ivss) src with | `Audio_packet (i, pkt) -> Avcodec.decode (List.assoc i dass) (fun _ -> ()) pkt; f () | `Video_packet (i, pkt) -> Avcodec.decode (List.assoc i dvss) (fun _ -> ()) pkt; f () | exception Avutil.Error `Eof -> () | _ -> assert false in f (); List.iter (fun (_, d) -> Avcodec.flush_decoder d (fun _ -> ())) dass; List.iter (fun (_, d) -> Avcodec.flush_decoder d (fun _ -> ())) dvss; Av.close src; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/demuxing_decoding.ml000066400000000000000000000053301457634536500217770ustar00rootroot00000000000000open Avutil module AudioConverter = Swresample.Make (Swresample.Frame) (Swresample.S32Bytes) module VideoConverter = Swscale.Make (Swscale.Frame) (Swscale.BigArray) (* module VideoConverter = Swscale.Make (Swscale.Frame) (Swscale.Frame) *) let () = if Array.length Sys.argv < 4 then ( Printf.eprintf " usage: %s input_file video_output_file audio_output_file\n\ \ API example program to show how to read frames from an input file. 
\n\ \ This program reads frames from a file, decodes them, and writes \ decoded\n\ \ video frames to a rawvideo file named video_output_file, and \ decoded\n\ \ audio frames to a rawaudio file named audio_output_file.\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let input_filename = Sys.argv.(1) in let video_output_filename = Sys.argv.(2) in let audio_output_filename = Sys.argv.(3) in let src = Av.open_input input_filename in Av.get_input_metadata src |> List.iter (fun (k, v) -> print_endline (k ^ " : " ^ v)); let audio_index, audio_stream, audio_codec = Av.find_best_audio_stream src in let a_ctx = AudioConverter.from_codec audio_codec `Stereo 44100 in let audio_output_file = open_out_bin audio_output_filename in let video_index, video_stream, _ = Av.find_best_video_stream src in (* let v_ctx = VideoConverter.from_codec video_codec 800 600 `Yuv420p in *) let v_ctx = VideoConverter.create [] 352 288 `Yuv420p 800 600 `Yuv420p in let video_output_file = open_out_bin video_output_filename in let rec decode () = match Av.read_input ~audio_frame:[audio_stream] ~video_frame:[video_stream] src with | `Audio_frame (idx, af) -> if idx = audio_index then AudioConverter.convert a_ctx af |> output_bytes audio_output_file; decode () | `Video_frame (idx, vf) -> if idx = video_index then VideoConverter.convert v_ctx vf |> ignore (*output_video video_output_file*); decode () | `Subtitle_frame (_, sf) -> let _, _, lines = Subtitle.frame_to_lines sf in lines |> List.iter print_endline; decode () | exception Error `Eof -> () | exception Error err -> prerr_endline (Avutil.string_of_error err) | _ -> assert false in decode (); Av.close src; close_out video_output_file; close_out audio_output_file; Printf.printf "Demuxing succeeded.\n"; Printf.printf "Play the output audio file with the command:\n\ ffplay -f %s -ac 2 -ar 44100 %s\n" (Option.get (Sample_format.get_name `S32) ^ if Sys.big_endian then "be" else "le") audio_output_filename; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/dune000066400000000000000000000042731457634536500166540ustar00rootroot00000000000000(env (dev (flags (:standard -warn-error -A)))) (executable (name hw_encode) (modules hw_encode) (libraries ffmpeg-av)) (executable (name aresample) (modules aresample) (libraries ffmpeg-av ffmpeg-avfilter)) (executable (name audio_decoding) (modules audio_decoding) (libraries ffmpeg-av ffmpeg-swresample)) (executable (name audio_device) (modules audio_device) (libraries ffmpeg-avdevice)) (executable (name decode_audio) (modules decode_audio) (libraries ffmpeg-av ffmpeg-avfilter)) (executable (name decode_stream) (modules decode_stream) (libraries ffmpeg-av ffmpeg-avfilter)) (executable (name demuxing_decoding) (modules demuxing_decoding) (libraries ffmpeg-av ffmpeg-swresample ffmpeg-swscale)) (executable (name encode_audio) (modules encode_audio) (libraries ffmpeg-avcodec ffmpeg-swresample)) (executable (name encode_stream) (modules encode_stream) (libraries ffmpeg-av ffmpeg-avfilter ffmpeg-swresample)) (executable (name encode_video) (modules encode_video) (libraries ffmpeg-av)) (executable (name encoding) (modules encoding) (libraries ffmpeg-av ffmpeg-swresample)) (executable (name fps) (modules fps) (libraries ffmpeg-av ffmpeg-avfilter)) (executable (name fps_samplerate) (modules fps_samplerate) (libraries ffmpeg-av ffmpeg-avfilter)) (executable (name list_filters) (modules list_filters) (libraries ffmpeg-avfilter)) (executable (name player) (modules player) (libraries ffmpeg-av 
ffmpeg-avdevice)) (executable (name webcam) (modules webcam) (libraries ffmpeg-av ffmpeg-avdevice)) (executable (name read_metadata) (modules read_metadata) (libraries ffmpeg-av)) (executable (name remuxing) (modules remuxing) (libraries ffmpeg-av)) (executable (name transcode_aac) (modules transcode_aac) (libraries ffmpeg-av)) (executable (name transcoding) (modules transcoding) (libraries ffmpeg-av)) (executable (name decoding) (modules decoding) (libraries ffmpeg-av)) (executable (name all_codecs) (modules all_codecs) (libraries ffmpeg-avcodec)) (executable (name all_bitstream_filters) (modules all_bitstream_filters) (libraries ffmpeg-avcodec)) (executable (name interrupt) (modules interrupt) (libraries ffmpeg-av)) ocaml-ffmpeg-1.1.11/examples/encode_audio.ml000066400000000000000000000032561457634536500207460ustar00rootroot00000000000000open Avcodec module Resampler = Swresample.Make (Swresample.FloatArray) (Swresample.Frame) let ( %> ) f g x = g (f x) let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 3 then ( Printf.eprintf "Usage: %s <output file> <codec name>\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let pi = 4.0 *. atan 1.0 in let sample_rate = 44100 in let codec = Audio.find_encoder_by_name Sys.argv.(2) in let out_sample_format = Audio.find_best_sample_format codec `Dbl in let time_base = { Avutil.num = 1; den = sample_rate } in let encoder = Audio.create_encoder ~channel_layout:`Stereo ~channels:2 ~time_base ~sample_format:out_sample_format ~sample_rate codec in let () = let p = Avcodec.params encoder in Printf.printf "Codec ID: %s\n%!" Avcodec.Audio.(string_of_id (get_params_id p)) in let frame_size = if List.mem `Variable_frame_size (capabilities codec) then 512 else Audio.frame_size encoder in let out_sample_format = Audio.find_best_sample_format codec `Dbl in let rsp = Resampler.create `Mono sample_rate `Stereo ~out_sample_format sample_rate in let c = 2. *. pi *. 440. /. float_of_int sample_rate in let out_file = open_out_bin Sys.argv.(1) in for i = 0 to 2000 do Array.init (2 * frame_size) (fun t -> sin (float_of_int (t + (i * frame_size)) *. c)) |> Resampler.convert ~offset:10 ~length:frame_size rsp |> Avcodec.encode encoder (Packet.to_bytes %> output_bytes out_file) done; Avcodec.flush_encoder encoder (Packet.to_bytes %> output_bytes out_file); close_out out_file; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/encode_stream.ml000066400000000000000000000055541457634536500211430ustar00rootroot00000000000000open Avcodec module Resampler = Swresample.Make (Swresample.FloatArray) (Swresample.Frame) let () = Printexc.record_backtrace true let () = if Array.length Sys.argv < 3 then ( Printf.eprintf "Usage: %s <output file> <codec name>\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let pi = 4.0 *. atan 1.0 in let sample_rate = 44100 in let codec = Audio.find_encoder_by_name Sys.argv.(2) in let out_sample_format = Audio.find_best_sample_format codec `Dbl in let out_sample_rate = if Sys.argv.(2) = "flac" then 22050 else 44100 in let rsp = Resampler.create `Mono sample_rate `Stereo ~out_sample_format out_sample_rate in let c = 2. *. pi *. 440. /. float_of_int sample_rate in let filename = Sys.argv.(1) in let format = match Av.Format.guess_output_format ~filename () with | None -> failwith "No format for filename!" 
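(* The output muxer is guessed from the given file name, typically from its extension. *)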
| Some f -> f in let fd = Unix.(openfile filename [O_WRONLY; O_CREAT; O_TRUNC] 0o644) in let write = Unix.write fd in let seek = Unix.lseek fd in let output_opt = Hashtbl.create 2 in Hashtbl.add output_opt "packetsize" (`Int 4096); Hashtbl.add output_opt "foo" (`String "bla"); let output = Av.open_output_stream ~opts:output_opt ~seek write format in let pts = ref 0L in let time_base = { Avutil.num = 1; den = out_sample_rate } in let opts = Hashtbl.create 2 in Hashtbl.add opts "lpc_type" (`String "none"); Hashtbl.add opts "foo" (`String "bla"); let stream = Av.new_audio_stream ~channels:2 ~time_base ~sample_format:out_sample_format ~sample_rate:out_sample_rate ~codec ~opts output in let out_frame_size = if List.mem `Variable_frame_size (capabilities codec) then 512 else Av.get_frame_size stream in let audio_filter = let in_params = { Avfilter.Utils.sample_rate = out_sample_rate; channel_layout = `Stereo; sample_format = out_sample_format; } in Avfilter.Utils.init_audio_converter ~in_params ~in_time_base:time_base ~out_frame_size () in assert (Hashtbl.mem opts "foo"); if Sys.argv.(2) = "flac" then assert (not (Hashtbl.mem opts "lpc_type")) else assert (Hashtbl.mem opts "lpc_type"); assert (Hashtbl.mem output_opt "foo"); assert (not (Hashtbl.mem output_opt "packetsize")); let on_frame frame = Avutil.Frame.set_pts frame (Some !pts); pts := Int64.add !pts (Int64.of_int (Avutil.Audio.frame_nb_samples frame)); Av.write_frame stream frame in for i = 0 to 2000 do Array.init out_frame_size (fun t -> sin (float_of_int (t + (i * out_frame_size)) *. c)) |> Resampler.convert rsp |> fun frame -> Avfilter.Utils.convert_audio audio_filter on_frame (`Frame frame) done; Avfilter.Utils.convert_audio audio_filter on_frame `Flush; Av.close output; Unix.close fd; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/encode_video.ml000066400000000000000000000060061457634536500207470ustar00rootroot00000000000000open Avutil let () = Printexc.record_backtrace true let fill_yuv_image width height frame_index planes = (* Y *) let data_y, linesize_y = planes.(0) in for y = 0 to height - 1 do let off = y * linesize_y in for x = 0 to width - 1 do data_y.{x + off} <- x + y + (frame_index * 3) done done; (* Cb and Cr *) let data_cb, linesize_cb = planes.(1) in let data_cr, _ = planes.(2) in for y = 0 to (height / 2) - 1 do let off = y * linesize_cb in for x = 0 to (width / 2) - 1 do data_cb.{x + off} <- 128 + y + (frame_index * 2); data_cr.{x + off} <- 64 + x + (frame_index * 5) done done let () = if Array.length Sys.argv < 4 then ( Printf.eprintf "Usage: %s <output file> <pixel format> <codec name>\n" Sys.argv.(0); exit 1); Avutil.Log.set_level `Debug; Avutil.Log.set_callback print_string; let width = 352 in let height = 288 in let pixel_format = Avutil.Pixel_format.of_string Sys.argv.(2) in let () = let string_of_flag = function | `Be -> "be" | `Pal -> "pal" | `Bitstream -> "bitstream" | `Hwaccel -> "hwaccel" | `Planar -> "planar" | `Rgb -> "rgb" | `Pseudopal -> "pseudopal" | `Alpha -> "alpha" | `Bayer -> "bayer" | `Float -> "float" in let string_of_comp { Avutil.Pixel_format.plane; step; shift; offset; depth } = Printf.sprintf "plane: %i, step: %i, shift: %i, offset: %i, depth: %i" plane step shift offset depth in let descriptor = Avutil.Pixel_format.descriptor pixel_format in Printf.printf "Pixel format:\n\ name: %s\n\ nb_components: %i\n\ log2_chroma_w: %i\n\ log2_chroma_h: %i\n\ flags: %s\n\ comp: [\n\ \ %s\n\ ]\n\ alias: %s\n\ bits: %i\n" descriptor.Avutil.Pixel_format.name descriptor.Avutil.Pixel_format.nb_components 
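(* log2_chroma_w and log2_chroma_h describe the chroma subsampling of the pixel format. *)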
descriptor.Avutil.Pixel_format.log2_chroma_w descriptor.Avutil.Pixel_format.log2_chroma_h (String.concat ", " (List.map string_of_flag descriptor.Avutil.Pixel_format.flags)) (String.concat ",\n " (List.map string_of_comp descriptor.Avutil.Pixel_format.comp)) (match descriptor.Avutil.Pixel_format.alias with | None -> "N/A" | Some a -> a) (Avutil.Pixel_format.bits descriptor) in let codec = Avcodec.Video.find_encoder_by_name Sys.argv.(3) in let dst = Av.open_output Sys.argv.(1) in let frame_rate = { Avutil.num = 25; den = 1 } in let time_base = { Avutil.num = 1; den = 25 } in let pts = ref 0L in let ovs = Av.new_video_stream ~width ~height ~frame_rate ~time_base ~pixel_format ~codec dst in let frame = Video.create_frame width height pixel_format in for i = 0 to 240 do Video.frame_visit ~make_writable:true (fill_yuv_image width height i) frame |> fun frame -> Avutil.Frame.set_pts frame (Some !pts); pts := Int64.succ !pts; Av.write_frame ovs frame done; Av.close dst; Gc.full_major (); Gc.full_major () ocaml-ffmpeg-1.1.11/examples/encoding.ml000066400000000000000000000073701457634536500201170ustar00rootroot00000000000000open Avutil module Resampler = Swresample.Make (Swresample.FloatArray) (Swresample.Frame) let fill_image_y width height frame_index planes = (* Y *) let data, linesize = planes.(0) in for y = 0 to height - 1 do let off = y * linesize in for x = 0 to width - 1 do data.{x + off} <- x + y + (frame_index * 3) done done let fill_image_on width height frame_index planes = fill_image_y width height frame_index planes; (* Cb and Cr *) let data_cb, _ = planes.(1) in let data_cr, linesize = planes.(2) in for y = 0 to (height / 2) - 1 do let off = y * linesize in for x = 0 to (width / 2) - 1 do data_cb.{x + off} <- 128 + y + (frame_index * 2); data_cr.{x + off} <- 64 + x + (frame_index * 5) done done let fill_image_off width height frame_index planes = fill_image_y width height frame_index planes; (* Cb and Cr *) let data_cb, _ = planes.(1) in let data_cr, linesize = planes.(2) in for y = 0 to (height / 2) - 1 do let off = y * linesize in for x = 0 to (width / 2) - 1 do data_cb.{x + off} <- 128; data_cr.{x + off} <- 128 done done let () = if Array.length Sys.argv < 5 then ( Printf.printf "Usage: %s