原来做的一个UDP语音聊天软件,用speex语音库.实现简单的IP连接聊天,效果一般,供学习用。 简单说下UDP协议。 UDP 是User Datagram Protocol的简称, 中文名是用户数据报协议,是OSI(Open System Interconnection,开放式系统互联)参考模型中一种无连接的传输层协议,提供面向事务的简单不可靠信息传送服务,IETF RFC 768是UDP的正式规范。UDP在IP报文的协议号是17。 UDP协议全称是用户数据报协议,在网络中它与TCP协议一样用于处理数据包,是一种无连接的协议。在OSI模型中,在第四层——传输层,处于IP协议的上一层。UDP有不提供数据包分组、组装和不能对数据包进行排序的缺点,也就是说,当报文发送之后,是无法得知其是否安全完整到达的。UDP用来支持那些需要在计算机之间传输数据的网络应用。包括网络视频会议系统在内的众多的客户/服务器模式的网络应用都需要使用UDP协议。UDP协议从问世至今已经被使用了很多年,虽然其最初的光彩已经被一些类似协议所掩盖,但是即使是在今天UDP仍然不失为一项非常实用和可行的网络传输层协议。 与所熟知的TCP(传输控制协议)协议一样,UDP协议直接位于IP(网际协议)协议的顶层。根据OSI(开放系统互连)参考模型,UDP和TCP都属于传输层协议。UDP协议的主要作用是将网络数据流量压缩成数据包的形式。一个典型的数据包就是一个二进制数据的传输单位。每一个数据包的前8个字节用来包含报头信息,剩余字节则用来包含具体的传输数据。
iCall
package com.lee.call.work;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.SocketException;

/**
 * Receives raw UDP datagrams into a caller-supplied buffer.
 *
 * <p>The buffer passed to the constructor is shared with the caller: every
 * successful {@link #receive()} overwrites it in place with the payload of the
 * next datagram that arrives on port {@value #PORT}.
 */
public class UDPReceiver {

    /** Local port the voice stream is received on (must match UDPSender). */
    private static final int PORT = 9527;

    protected DatagramPacket dgp;
    protected DatagramSocket ds;

    /**
     * Binds a UDP socket on port {@value #PORT}.
     *
     * @param data shared receive buffer; filled in place by {@link #receive()}
     * @throws IllegalStateException if the port cannot be bound (e.g. already
     *         in use). The original code swallowed this, leaving {@code ds}
     *         null and guaranteeing a NullPointerException on first receive.
     */
    public UDPReceiver(byte[] data) {
        try {
            ds = new DatagramSocket(PORT);
            dgp = new DatagramPacket(data, data.length);
        } catch (SocketException e) {
            // Fail fast with the cause preserved instead of deferring to an NPE.
            throw new IllegalStateException("Unable to bind UDP socket on port " + PORT, e);
        }
    }

    /**
     * Blocks until one datagram arrives and copies its payload into the
     * buffer supplied at construction time. I/O errors are logged and the
     * buffer is left with its previous contents.
     */
    public void receive() {
        try {
            ds.receive(dgp);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Releases the bound socket so port {@value #PORT} can be reused.
     * The original class leaked the socket for the lifetime of the process.
     */
    public void close() {
        if (ds != null && !ds.isClosed()) {
            ds.close();
        }
    }
}
package com.lee.call.work;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;

/**
 * Sends the contents of a caller-supplied buffer as UDP datagrams to a fixed
 * peer. The buffer is shared: the caller (see Recorder) overwrites it with a
 * freshly encoded frame before each {@link #send()}.
 */
public class UDPSender {

    /** Destination port of the voice stream (must match UDPReceiver). */
    private static final int PORT = 9527;

    protected DatagramPacket dgp;
    protected DatagramSocket ds;

    /**
     * Creates an unbound UDP socket targeting {@code ip}:{@value #PORT}.
     * Widened from {@code protected} to {@code public} for consistency with
     * {@link UDPReceiver} (backward compatible).
     *
     * @param ip   destination host name or dotted-quad address
     * @param data shared send buffer; its current contents are transmitted on
     *             every {@link #send()}
     * @throws IllegalStateException if the socket cannot be created or the
     *         host cannot be resolved. The original code swallowed both
     *         exceptions, leaving the fields null and deferring the failure
     *         to a NullPointerException in send().
     */
    public UDPSender(String ip, byte[] data) {
        try {
            ds = new DatagramSocket();
            dgp = new DatagramPacket(data, data.length, InetAddress.getByName(ip), PORT);
        } catch (SocketException | UnknownHostException e) {
            throw new IllegalStateException("Unable to create UDP sender for " + ip + ":" + PORT, e);
        }
    }

    /**
     * Transmits the current contents of the shared buffer as one datagram.
     * UDP is fire-and-forget: a successful return does not imply delivery.
     * I/O errors are logged and the frame is dropped.
     */
    public void send() {
        try {
            ds.send(dgp);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /** Releases the underlying socket. The original class leaked it. */
    public void close() {
        if (ds != null && !ds.isClosed()) {
            ds.close();
        }
    }
}
package com.lee.call.work; import android.media.AudioManager; import android.media.AudioTrack; import com.lee.speex.jni.Speex; public class Player implements Runnable { private AudioTrack track; private UDPReceiver receiver; private short[] pcmFrame = new short[AudioParam.FRAME_SIZE]; private byte[] encodedFrame = new byte[AudioParam.getDefaultQuality()]; private boolean running; protected Player() { // TODO Auto-generated constructor stub receiver = new UDPReceiver(encodedFrame); int trackBufferSize = AudioTrack.getMinBufferSize( AudioParam.sampleRateInHz, AudioParam.channelOutConfig, AudioParam.audioFormat); track = new AudioTrack(AudioManager.STREAM_MUSIC, AudioParam.sampleRateInHz, AudioParam.channelOutConfig, AudioParam.audioFormat, trackBufferSize, AudioTrack.MODE_STREAM); new Thread(this).start(); } @Override public void run() { // TODO Auto-generated method stub running = true; track.play(); while (running) { receiver.receive(); Speex.decode(encodedFrame, pcmFrame, encodedFrame.length); track.write(pcmFrame, 0, AudioParam.FRAME_SIZE); } track.stop(); } public boolean isRunning() { return running; } public void setRunning(boolean running) { this.running = running; } }
package com.lee.call.work; import android.media.AudioRecord; import android.media.MediaRecorder; import com.lee.speex.jni.Speex; public class Recorder implements Runnable { private AudioRecord recorder; private short[] pcmFrame = new short[AudioParam.FRAME_SIZE]; private byte[] encodedFrame = new byte[AudioParam.getDefaultQuality()]; private UDPSender sender; private boolean running; protected Recorder(String ip) { // TODO Auto-generated constructor stub sender = new UDPSender(ip, encodedFrame); int recordBufferSize = AudioRecord.getMinBufferSize( AudioParam.sampleRateInHz, AudioParam.channelInConfig, AudioParam.audioFormat); recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, AudioParam.sampleRateInHz, AudioParam.channelInConfig, AudioParam.audioFormat, recordBufferSize); new Thread(this).start(); } @Override public void run() { // TODO Auto-generated method stub running = true; recorder.startRecording(); while (running) { recorder.read(pcmFrame, 0, AudioParam.FRAME_SIZE); Speex.encode(pcmFrame, encodedFrame); sender.send(); } recorder.stop(); } public boolean isRunning() { return running; } public void setRunning(boolean running) { this.running = running; } }
package com.lee.call.work;

import android.media.AudioFormat;

/**
 * Shared audio/codec configuration for the call pipeline (Recorder, Player,
 * UDP sender/receiver). Constants holder — not meant to be instantiated.
 */
public final class AudioParam {

    /*
     * Speex narrowband quality setting (also used as an index into
     * encodedSizes, so it must stay in [0, 10]):
     *   1 :  4 kbps (very noticeable artifacts, usually intelligible)
     *   2 :  6 kbps (very noticeable artifacts, good intelligibility)
     *   4 :  8 kbps (noticeable artifacts sometimes)
     *   6 : 11 kbps (artifacts usually only noticeable with headphones)
     *   8 : 15 kbps (artifacts not usually noticeable)
     *  10 : highest narrowband quality (62-byte encoded frames, see below)
     */
    public static final int QUALITY = 10;

    public static final int HZ_8000 = 8000;
    public static final int HZ_11025 = 11025;
    public static final int HZ_22050 = 22050;
    public static final int HZ_44100 = 44100;

    /** 8 kHz narrowband — the rate Speex NB frames below are sized for. */
    public static final int sampleRateInHz = HZ_8000;

    /** Samples per Speex narrowband frame: 160 samples = 20 ms at 8 kHz. */
    public static final int FRAME_SIZE = 160;
    public static final int FRAME_SIZE_IN_SHORTS = FRAME_SIZE;
    public static final int FRAME_SIZE_IN_BYTES = FRAME_SIZE << 1;

    // NOTE(review): CHANNEL_CONFIGURATION_MONO is deprecated in favor of
    // CHANNEL_IN_MONO / CHANNEL_OUT_MONO; the old constants have different
    // values, so switching them is a behavior change left to the maintainer.
    public static final int channelInConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    public static final int channelOutConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    public static final int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

    // Runtime switches (mutable on purpose; toggled elsewhere in the app).
    public static boolean testlocal = true;
    public static boolean uselib = true;

    /** Encoded Speex NB frame size in bytes, indexed by quality 0..10. */
    private static final int[] encodedSizes = { 6, 10, 15, 20, 20, 28, 28,
            38, 38, 46, 62 };

    private AudioParam() {
        // Constants holder; no instances.
    }

    /**
     * Size in bytes of one encoded frame at {@link #QUALITY}.
     * Kept under its historical (misleading) name for compatibility —
     * it returns a byte count, not a quality level.
     */
    public static int getDefaultQuality() {
        return encodedSizes[QUALITY];
    }

    /** Clearly named alias for {@link #getDefaultQuality()}. */
    public static int getEncodedFrameSize() {
        return encodedSizes[QUALITY];
    }
}部分代码贴出,工程点下面的链接。
iCall
收藏的用户(0) X
正在加载信息~
推荐阅读
最新回复 (2)
- SINA1902915870 2015-5-13引用 2楼楼主有没有实现Android视频聊天的相关资料什么的??
-
站点信息
- 文章2300
- 用户1336
- 访客10865449
每日一句
To be a happy man.
做一个幸福的人。
新会员