Package elisa :: Package plugins :: Package good :: Package gstreamer_plugin :: Module gst_metadata
[hide private]
[frames] | [no frames]

Source Code for Module elisa.plugins.good.gstreamer_plugin.gst_metadata

   1  # -*- coding: utf-8 -*- 
   2  # Elisa - Home multimedia server 
   3  # Copyright (C) 2006-2008 Fluendo Embedded S.L. (www.fluendo.com). 
   4  # All rights reserved. 
   5  # 
   6  # This file is available under one of two license agreements. 
   7  # 
   8  # This file is licensed under the GPL version 2. 
   9  # See "LICENSE.GPL" in the root of this distribution including a special 
  10  # exception to use Elisa with Fluendo's plugins. 
  11  # 
  12  # The GPL part of Elisa is also available under a commercial licensing 
  13  # agreement from Fluendo. 
  14  # See "LICENSE.Elisa" in the root directory of this distribution package 
  15  # for details on that license. 
  16   
  17  import gobject 
  18  gobject.threads_init() 
  19  import pygst 
  20  pygst.require('0.10') 
  21  import gst 
  22  import os 
  23  import sys 
  24  import time 
  25  from threading import Lock 
  26  import platform 
  27   
  28  from elisa.base_components.metadata_provider import MetadataProvider 
  29  from elisa.core.log import Loggable 
  30  from elisa.core.media_uri import MediaUri 
  31  from elisa.core.utils import locale_helper 
  32  from elisa.core.component import InitializeFailure 
  33   
  34  from twisted.internet import defer, reactor, error 
  35  from twisted.internet.protocol import ProcessProtocol 
  36  from twisted.internet.stdio import StandardIO 
  37  from twisted.spread import pb, flavors, jelly 
  38  from threading import Lock 
  39   
  40  import pkg_resources 
  41   
  42  import PIL 
  43  from PIL import PngImagePlugin 
  44  import Image 
  45  import ImageStat 
  46  import md5 
  47   
  48  if platform.system() == 'windows': 
  49      import win32process 
  50   
  51  SEEK_SCHEDULED = 'scheduled' 
  52  SEEK_DONE = 'done' 
  53   
  54  THUMBNAIL_DIR = os.path.join(os.path.expanduser("~"), ".thumbnails", 'large') 
  55  THUMBNAIL_SIZE = 256 
  56  BORING_IMAGE_VARIANCE=2000 
  57   
  58  __maintainer__ = 'Alessandro Decina <alessandro@fluendo.com>' 
  59       
  60  supported_metadata_keys = set(['artist', 'album', 'song', 'track']) 
  61  media_type_keys = set(['uri', 'file_type', 'mime_type']) 
  62  supported_keys = supported_metadata_keys.union(media_type_keys) 
  63   
class TimeoutError(Exception):
    """Raised when a metadata request exceeds its allotted time."""

    def __init__(self):
        # Every timeout is reported with the same fixed message.
        super(TimeoutError, self).__init__('timeout')
def able_to_handle(supported_schemes, supported_keys, metadata):
    """Return True if a request described by *metadata* can be served.

    The request must carry a uri whose scheme is one of
    *supported_schemes*, and at least one key from *supported_keys*
    must still be unset (None) -- otherwise there is nothing to do.
    """
    uri = metadata.get('uri')
    if not uri or uri.scheme not in supported_schemes:
        return False

    # Keys the caller asked for that this provider knows how to fill.
    requested = supported_keys.intersection(metadata.keys())
    # Only unfilled (None) keys represent actual work.
    return any(metadata[key] is None for key in requested)
class GstMetadataPipeline(Loggable):
    """Extract tags, media type and video thumbnails from a local file
    using a GStreamer pipeline (filesrc ! typefind [! decodebin ...]).

    One request is processed at a time; see get_metadata().
    """

    # When True, pipeline elements are kept and reused across requests
    # instead of being destroyed and rebuilt each time.
    reuse_elements = False
    # seconds to wait for a request before giving up
    timeout = 2
    # extra seconds granted when a video thumbnail is being generated
    thumb_timeout = 2
    def __init__(self):
        """Set up per-instance state; the pipeline itself is built lazily."""
        super(GstMetadataPipeline, self).__init__()
        self._pipeline = None
        self._decodebin = None
        self._ffmpegcolorspace = None
        # elements plugged per-request, removed again in _clean_pipeline()
        self._plugged_elements = []
        # fractions of the stream duration tried (in order) when looking
        # for a non-boring frame to thumbnail
        self._frame_locations = [1.0 / 3.0, 2.0 / 3.0, 0.1, 0.9, 0.5]
        # guards _seek_status and _thumb_probe_id (touched from the
        # streaming thread in the buffer probe)
        self._probe_lock = Lock()
        self._thumb_probe_id = None

        # other instance variables that need to be reset for each new metadata
        # request are set directly in _reset()
100 - def clean(self):
101 self._clean_pipeline(finalize=True) 102 103 if self._timeout_call is not None: 104 self._timeout_call.cancel() 105 self._timeout_call = None 106 107 if self._seek_call is not None: 108 self._seek_call.cancel() 109 self._seek_call = None
110
    def initialize(self):
        """Prepare the pipeline for the first request (builds it via _reset)."""
        self._reset()
    def _clean_pipeline(self, finalize=False):
        """Destroy the pipeline, or strip it back to a reusable core.

        With reuse_elements False (or finalize=True) the whole pipeline is
        released; otherwise only the per-request elements (decodebin and
        anything in _plugged_elements) are removed.
        """
        if not self.reuse_elements or finalize:
            # destroy the pipeline
            if self._pipeline is not None:
                self._pipeline.set_state(gst.STATE_NULL)
                self._pipeline = None
                self._decodebin = None
                self._ffmpegcolorspace = None
                self._plugged_elements = []
        else:
            if self._pipeline is not None:
                # flush the bus so stale messages from the old request are
                # not delivered to the callbacks
                self._bus.set_flushing(True)
                self._pipeline.set_state(gst.STATE_READY)

            if self._decodebin is not None:
                self._typefind.unlink(self._decodebin)
                self._decodebin.set_state(gst.STATE_NULL)
                self._pipeline.remove(self._decodebin)
                self._decodebin = None

            for element in self._plugged_elements:
                self._pipeline.remove(element)
                element.set_state(gst.STATE_NULL)
            self._plugged_elements = []
139 - def _build_pipeline(self):
140 self._pipeline = gst.Pipeline() 141 self._bus = self._pipeline.get_bus() 142 self._bus.add_signal_watch() 143 self._bus.connect('message::application', 144 self._bus_message_application_cb) 145 self._bus.connect('message::error', self._bus_message_error_cb) 146 self._bus.connect('message::eos', self._bus_message_eos_cb) 147 self._bus.connect('message::tag', self._bus_message_tag_cb) 148 self._bus.connect('message::state-changed', 149 self._bus_message_state_changed_cb) 150 self._src = gst.element_factory_make('filesrc') 151 self._typefind = gst.element_factory_make('typefind') 152 self._typefind.connect('have-type', self._typefind_have_type_cb) 153 pad = self._typefind.get_pad('src') 154 self._pipeline.add(self._src, self._typefind) 155 self._src.link(self._typefind) 156 157 self._pipeline.set_state(gst.STATE_READY)
158
    def _reset(self):
        """Reset all per-request state, rebuilding the pipeline if needed."""
        # NOTE: we call gst_element_set_state so we MUST NOT be called from the
        # streaming thread

        self._probe_lock.acquire()
        if self._thumb_probe_id is not None:
            self._rgb_pad.remove_buffer_probe(self._thumb_probe_id)
            self._thumb_probe_id = None
        self._probe_lock.release()

        # destroy the current pipeline if reuse_elements == False, otherwise
        # clean it so that it can be reused
        self._clean_pipeline()
        if self._pipeline is None:
            # we're either being called from initialize() or
            # self.reuse_elements == False
            self._build_pipeline()

        # the metadata dictionary of the current request
        self._req_metadata = None
        # the uri value in the metadata dictionary
        self._req_uri = None
        # the deferred that we callback when we finish loading stuff in
        # self._req_metadata
        self._req_defer = None

        # the caps as given by the typefind::have-type signal
        self._typefind_caps = None
        self._typefind_file_type = None
        self._typefind_mime_type = None

        # the video/audio/image caps that we get from decodebin pads when
        # we plug decodebin
        self._video_caps = None
        self._audio_caps = None
        self._image_caps = None

        # the taglist containing all the tags for the stream
        self._tags = gst.TagList()

        # the duration of the current stream, used to seek when doing a
        # thumbnail
        self._duration = None
        # needs to be guarded with self._probe_lock
        self._seek_status = None
        self._seek_location_index = 0
        self._seek_call = None

        self._timeout_call = None

        # timestamps used for logging purposes
        self._start_timestamp = 0
        self._end_timestamp = 0
    def _bus_message_error_cb(self, bus, message):
        """Handle a pipeline error, salvaging partial results when possible."""
        gerror, debug = message.parse_error()
        if self._typefind_file_type is not None or \
                self._video_caps is not None or \
                self._audio_caps is not None or \
                self._image_caps is not None:
            # we got an error going to PAUSED but we still can report the info
            # that we got from have_type_cb
            self.debug('error going to paused %s: %s', gerror.message, debug)
            self._done()
        else:
            self._failed(Exception('%s: %s' % (gerror.message, debug)))
226 - def _bus_message_application_cb(self, bus, message):
227 if message.structure.get_name() == 'metadata-done': 228 self._done() 229 return 230 231 error = Exception(message.structure['error']) 232 self._failed(error)
233
    def _bus_message_eos_cb(self, bus, message):
        """End of stream: everything that could be collected has been."""
        self._done()
237 - def _bus_message_tag_cb(self, bus, message):
238 taglist = message.parse_tag() 239 self._tags = self._tags.merge(taglist, gst.TAG_MERGE_APPEND)
240
    def _bus_message_state_changed_cb(self, bus, message):
        """Finish the request once the pipeline has fully prerolled.

        We only finish here for non-video files, or for video files whose
        thumbnail already exists on disk; otherwise the buffer probe /
        thumbnailing path decides when we are done.
        """
        if message.src == self._pipeline:
            self.debug('state changed %s' % message)

            prev, current, pending = message.parse_state_changed()
            if current == gst.STATE_PAUSED and \
                    self._decodebin and self._decodebin.get_pad('sink').is_linked() and \
                    (self._typefind_file_type != 'video' or self._have_video_thumbnail()):
                self._done()
    def _typefind_have_type_cb(self, typefind, probability, caps):
        """typefind found the stream type (runs in the streaming thread).

        If the request only asked for the media type we can answer
        immediately; otherwise decodebin is plugged to get tags and/or a
        thumbnail.
        """
        self.debug('have type %s' % caps)

        # self._typefind_caps = caps is broken, bug in the bindings
        # FIXME: fix the bug and change this asap
        self._typefind_caps = caps.copy()
        gst_mime_type = self._typefind_mime_type = self._typefind_caps[0].get_name()
        file_type = self._typefind_file_type = gst_mime_type.split('/')[0]

        # NB: id3 tags most of the time are used with mp3 (even if it isn't
        # uncommon to find them with AIFF or WAV). Given that mp3 is by far the
        # most used audio format at the moment we make the common case fast here
        # by assuming that the file_type is audio. By doing this we also set the
        # mime_type to application/x-id3, but this doesn't matter at the moment
        # since we don't use the mime_type anywhere.
        if gst_mime_type == 'application/x-id3':
            file_type = self._typefind_file_type = 'audio'

        req_keys = set(self._req_metadata.keys())
        if req_keys == media_type_keys and (file_type in ('text', 'audio', 'image') or
                (file_type == 'video' and self._have_video_thumbnail())):
            self.debug('got media_type for %s, NOT going to paused',
                    self._req_uri)
            # we are in the streaming thread so we post a message on the bus
            # here and when we read it from the main thread we call _done()
            structure = gst.Structure('metadata-done')
            self._bus.post(gst.message_new_application(self._pipeline, structure))
            return

        # we need tags and/or a thumbnail
        self.debug('we need to go to PAUSED, plugging decodebin '
                '(file_type: %s, have_thumbnail: %s)', file_type,
                self._have_video_thumbnail())
        self._plug_decodebin()
286 - def _plug_decodebin(self):
287 if self._decodebin is None: 288 self._decodebin = gst.element_factory_make('decodebin') 289 self._decodebin.connect('new-decoded-pad', 290 self._decodebin_new_decoded_pad_cb) 291 self._decodebin.connect('unknown-type', 292 self._decodebin_unknown_type_cb) 293 self._pipeline.add(self._decodebin) 294 295 self._typefind.link(self._decodebin) 296 pad = self._typefind.get_pad('src') 297 self._decodebin.set_state(gst.STATE_PAUSED)
298
299 - def _get_thumbnail_location(self, uri):
300 thumbnail_filename = md5.new(str(uri)).hexdigest() + ".png" 301 302 return os.path.join(THUMBNAIL_DIR, thumbnail_filename)
303
    def _save_thumbnail(self, thumbnail):
        """Write the PIL image *thumbnail* as a PNG for the current uri.

        The originating URI is embedded in the PNG metadata
        (Thumb::URI) per the freedesktop thumbnail spec.
        """
        thumbnail_filename = self._get_thumbnail_location(self._req_uri)

        directory = os.path.dirname(thumbnail_filename)
        if not os.path.exists(directory):
            try:
                os.makedirs(directory, 0700)
            except OSError, e:
                msg = "Could not make directory %r: %s. Thumbnail not saved." % (directory, e)
                self.warning(msg)
                # NOTE(review): ThumbnailerError is not defined or imported
                # in this module -- raising here would be a NameError.
                # Confirm where it is meant to come from.
                raise ThumbnailerError(self._req_uri, msg)

        info = PngImagePlugin.PngInfo()

        # required metadata
        info.add_text("Thumb::URI", str(self._req_uri))
        thumbnail.save(thumbnail_filename, "png", pnginfo=info)
    def _rgb_pad_probe_cb(self, pad, buffer):
        """Buffer probe on the RGB pad; runs in the streaming thread.

        Returning False drops the buffer (pipeline keeps prerolling);
        returning True lets it through and the pipeline reaches PAUSED.
        """
        # Seek forward in the stream a number of times until we find a
        # non-boring frame to thumbnail. Every time we get a boring frame we
        # return False so the buffer is discarded and the pipeline doesn't go to
        # PAUSED

        self._probe_lock.acquire()
        if self._seek_status is None:
            self.debug('first probe, seeking to first location')
            # set _seek_status and schedule a delayed call
            self._seek_next_thumbnail_location()
            self._probe_lock.release()

            return False

        if self._seek_status != SEEK_DONE:
            # we're seeking to a better location, ignore buffers until the seek
            # is done
            self.debug('skipping buffer, seek status %s' % self._seek_status)
            self._probe_lock.release()

            return False
        else:
            self._probe_lock.release()

        caps = pad.props.caps
        if caps is None:
            self.warning('pad %s has no caps, not doing thumbnail' % pad)

            # we need the caps to get width and height of the frame but
            # something is wrong with upstream elements, we don't do the
            # thumbnail and let the sink preroll. This should never happen but
            # seems to happen sometimes with ffmpeg on windows.
            return True

        caps = caps[0]
        width = caps['width']
        height = caps['height']

        # we use PIL to compute the variance so while we are at it, we use PIL
        # to do the thumbnail as well. If we depend on PIL just for this we
        # should probably compute the variance ourselves and drop the
        # dependency.
        try:
            img = Image.frombuffer("RGB", (width, height),
                    buffer, "raw", "RGB", 0, 1)
        except Exception, e:
            self.debug("Invalid frame: %s", e)

            return False

        stat = ImageStat.Stat(img)
        boring = True
        for i in stat.var:
            # a frame is interesting if any band's variance is high enough
            if i > BORING_IMAGE_VARIANCE:
                boring = False
                break

        # FIXME: we do a thumbnail if the frame isn't boring or if this is our
        # last seek location. This is suboptimal as we should rather store all
        # the boring thumbnails and return the least boring as a last resort.
        if not boring or \
                self._seek_location_index == len(self._frame_locations):
            if not boring:
                self.debug("non-boring frame found")
            else:
                self.debug('boring frame at the last seek location... '
                        'doing the thumbnail anyway')

            img.thumbnail((THUMBNAIL_SIZE, THUMBNAIL_SIZE), Image.BILINEAR)
            if img.mode != 'RGBA':
                img = img.convert(mode='RGBA')

            self._save_thumbnail(img)

            self._probe_lock.acquire()
            try:
                self._rgb_pad.remove_buffer_probe(self._thumb_probe_id)
                self._thumb_probe_id = None
            finally:
                self._probe_lock.release()

            return True

        else:
            self.debug("boring frame, skipping")
            self._seek_next_thumbnail_location()
            return False

        # NOTE(review): unreachable -- both branches above return.
        return False
    def _plug_thumbnailbin(self, video_pad):
        """Attach ffmpegcolorspace ! capsfilter(RGB24) ! fakesink to
        *video_pad* and install the thumbnail buffer probe on the RGB pad.

        Returns the RGB src pad the probe is installed on.
        """
        if self._ffmpegcolorspace is None:
            self._ffmpegcolorspace = gst.element_factory_make('ffmpegcolorspace')
            self._capsfilter = gst.element_factory_make('capsfilter')
            self._pipeline.add(self._ffmpegcolorspace, self._capsfilter)
            # force 24bpp RGB so PIL can read the raw frame directly
            self._capsfilter.props.caps = \
                gst.Caps('video/x-raw-rgb, bpp=24, depth=24')
            self._ffmpegcolorspace.link(self._capsfilter)
            self._rgb_pad = self._capsfilter.get_pad('src')
        self._ffmpegcolorspace.set_state(gst.STATE_PAUSED)
        self._capsfilter.set_state(gst.STATE_PAUSED)
        video_pad.link(self._ffmpegcolorspace.get_pad('sink'))
        sink = gst.element_factory_make('fakesink')
        self._pipeline.add(sink)
        self._rgb_pad.link(sink.get_pad('sink'))
        sink.set_state(gst.STATE_PAUSED)
        # the sink is per-request; it gets removed in _clean_pipeline()
        self._plugged_elements.append(sink)
        self._thumb_probe_id = self._rgb_pad.add_buffer_probe(self._rgb_pad_probe_cb)

        return self._rgb_pad
434 - def _have_video_thumbnail(self):
435 return os.path.exists(self._get_thumbnail_location(self._req_uri))
436
437 - def _find_decoder(self, pad):
438 target = pad.get_target() 439 element = target.get_parent() 440 klass = element.get_factory().get_klass() 441 if 'Decoder' in klass: 442 return element 443 return None
444
445 - def _get_type_from_decoder(self, decoder):
446 klass = decoder.get_factory().get_klass() 447 parts = klass.split('/', 2) 448 if len(parts) != 3: 449 return None 450 451 return parts[2].lower()
452
    def _seek_next_thumbnail_location(self):
        # NOTE(review): the `def` line was lost in this source dump; the
        # signature is reconstructed from the reactor.callLater target name
        # below and from the call sites in _rgb_pad_probe_cb -- confirm.
        #
        # Mark a seek as scheduled and run it from the reactor loop: we are
        # called from the streaming thread (buffer probe) and the actual
        # seek must not happen there.
        self._seek_status = SEEK_SCHEDULED

        self._seek_call = \
            reactor.callLater(0, self._seek_next_thumbnail_location_real)
    def _seek_next_thumbnail_location_real(self):
        # NOTE(review): the `def` line was lost in this source dump; the
        # signature is reconstructed from the reactor.callLater call in
        # _seek_next_thumbnail_location -- confirm.
        #
        # Seek the pipeline to the next candidate thumbnail position from
        # _frame_locations; fails the request when the candidates run out.
        self._seek_call = None
        self._probe_lock.acquire()
        self._seek_status = SEEK_DONE
        self._probe_lock.release()

        if self._duration is None:
            # first seek, get the duration
            try:
                self._duration, format = self._pipeline.query_duration(gst.FORMAT_TIME)
            except gst.QueryError, e:
                self.debug('duration query failed: %s', e)

                return

        if self._duration == -1:
            self.debug('invalid duration, not seeking')
            return

        self.debug('stream duration %s' % self._duration)

        if self._seek_location_index == len(self._frame_locations):
            self.debug('no more seek locations')
            return self._failed(Exception('no more seek locations'))

        location = self._frame_locations[self._seek_location_index]
        self.debug('seek to location %d, time %s duration %s' %
                (self._seek_location_index,
                 gst.TIME_ARGS(int(location * self._duration)),
                 self._duration))
        self._seek_location_index += 1

        # flushing keyframe seek: cheap and accurate enough for a thumbnail
        res = self._pipeline.seek(1.0, gst.FORMAT_TIME,
                gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_KEY_UNIT,
                gst.SEEK_TYPE_SET, int(location * self._duration),
                gst.SEEK_TYPE_NONE, 0)

        self.debug('seek done res %s' % res)
    def _close_pad(self, pad):
        """Terminate an uninteresting decoded pad with queue ! fakesink so
        the pipeline can still preroll."""
        queue = gst.element_factory_make('queue')
        # set the queue leaky so that if we take some time to do the thumbnail
        # the demuxer doesn't block on full queues
        queue.props.leaky = 1
        sink = gst.element_factory_make('fakesink')
        self._pipeline.add(queue, sink)
        # add sink before queue so when we iterate over the elements to clean
        # them we clean the sink first and unblock the queue if it's blocked
        # prerolling
        self._plugged_elements.append(sink)
        self._plugged_elements.append(queue)
        pad.link(queue.get_pad('sink'))
        queue.link(sink)
        queue.set_state(gst.STATE_PAUSED)
        sink.set_state(gst.STATE_PAUSED)
515 - def _get_pad_type(self, pad):
516 decoder = self._find_decoder(pad) 517 if decoder: 518 return self._get_type_from_decoder(decoder) 519 520 return pad.get_caps()[0].get_name().split('/', 1)[0]
521
522 - def _get_pad_caps(self, pad):
523 decoder = self._find_decoder(pad) 524 if decoder: 525 return decoder.get_pad('sink').get_caps() 526 527 return pad.get_caps()
528
    def _decodebin_new_decoded_pad_cb(self, decodebin, pad, is_last):
        """Record caps for the first pad of each kind; thumbnail the first
        video pad; terminate anything left unlinked."""
        self.debug('new decoded pad %s, caps %s, is_last %s' % (pad,
                pad.get_caps(), is_last))

        typ = self._get_pad_type(pad)
        caps = self._get_pad_caps(pad)

        if typ == 'audio':
            if self._audio_caps is None:
                self._audio_caps = caps
        elif typ == 'video':
            if self._video_caps is None:
                self._video_caps = caps
                # do a thumbnail of the first video track
                self._plug_thumbnailbin(pad)
        elif typ == 'image':
            if self._image_caps is None:
                self._image_caps = caps

        if not pad.is_linked():
            self._close_pad(pad)
    def _decodebin_unknown_type_cb(self, decodebin, pad, caps):
        """Log pads decodebin could not decode; they are simply ignored."""
        self.debug('unknown pad %s, caps %s' % (pad, caps))
    def get_metadata(self, requested_metadata):
        """Start filling *requested_metadata* (must contain a 'uri' key).

        Returns a deferred firing with the completed dictionary. Only one
        request may be in flight at a time (hence the assert).
        """
        assert self._timeout_call is None

        self._req_metadata = requested_metadata
        self._req_uri = requested_metadata['uri']
        self._req_defer = defer.Deferred()

        self.debug('getting metadata %s' % self._req_metadata)

        self._start_timestamp = time.time()
        # filesrc wants a filesystem-encoded path, not unicode
        system_encoding = locale_helper.system_encoding()
        self._src.props.location = \
            self._req_metadata['uri'].path.encode(system_encoding)

        self._timeout_call = reactor.callLater(self.timeout, self._timeout)

        # reset the bus in case this is not the first request
        self._bus.set_flushing(False)
        self._pipeline.set_state(gst.STATE_PAUSED)

        return self._req_defer
576 - def _get_media_type_from_caps(self, caps):
577 res = {} 578 mime_type = caps[0].get_name() 579 file_type = mime_type.split('/', 1)[0] 580 581 return {'file_type': file_type, 'mime_type': mime_type}
582
    def _done(self):
        """Assemble the collected info into the request's metadata dict and
        fire its deferred; resets the pipeline for the next request."""
        if not self._timeout_call.called:
            self._timeout_call.cancel()

        # we can't check self._seek_call.called here because we don't know if we
        # scheduled a seek call at all
        if self._seek_call is not None:
            self._seek_call.cancel()
            self._seek_call = None

        self._end_timestamp = time.time()

        metadata = self._req_metadata
        metadata_defer = self._req_defer

        # media type, preferring video over audio over image caps
        available_metadata = {}
        for caps in (self._video_caps, self._audio_caps,
                self._image_caps):
            if caps is not None:
                available_metadata.update(self._get_media_type_from_caps(caps))
                break

        # fallback to typefind caps
        if available_metadata.get('file_type') is None:
            available_metadata['file_type'] = self._typefind_file_type
            available_metadata['mime_type'] = self._typefind_mime_type

        tags = self._tags

        # extended-comment can be huge and is never requested; drop it
        try:
            del tags['extended-comment']
        except KeyError:
            pass

        tag_keys = tags.keys()
        # translate gst tag names to the elisa metadata keys
        for gst_key, elisa_key in (('track-number', 'track'),
                ('title', 'song')):
            try:
                available_metadata[elisa_key] = tags[gst_key]
            except KeyError:
                pass

        for key in tag_keys:
            value = tags[key]
            # FIXME: this was an old assumption, let's keep it until we update
            # all the old code
            if isinstance(value, list):
                try:
                    value = value[0]
                except IndexError:
                    continue

            available_metadata[key] = value

        # only fill keys the caller asked for (present and still None)
        for key, value in available_metadata.iteritems():
            try:
                if metadata[key] is None:
                    metadata[key] = value
            except KeyError:
                pass

        self.debug('finished getting metadata %s, elapsed time %s' %
                (metadata, self._end_timestamp - self._start_timestamp))

        self._reset()
        metadata_defer.callback(metadata)
    def _timeout(self, thumb_timeout=False):
        """Request timeout handler.

        A video request gets one extra grace period (thumb_timeout) for
        thumbnailing; after that, report partial info if any was gathered,
        otherwise fail with TimeoutError.
        """
        self.debug('timeout thumb %s video caps %s',
                thumb_timeout, self._video_caps)

        if not thumb_timeout and (self._typefind_file_type == 'video' or
                self._video_caps is not None):
            # give some more time to the pipeline if we are trying to make a
            # thumbnail
            self._timeout_call = \
                reactor.callLater(self.thumb_timeout, self._timeout, True)
        else:
            if self._typefind_file_type is not None or \
                    self._video_caps is not None or \
                    self._audio_caps is not None or \
                    self._image_caps is not None:
                # timeout while going to paused. This can happen on really slow
                # machines while doing the thumbnail. Even if we didn't do the
                # thumbnail, we have some clue about the media type here.
                self._done()
            else:
                self._failed(TimeoutError())
    def _failed(self, error):
        """Abort the current request, firing its errback with *error*."""
        # cancel delayed calls
        if not self._timeout_call.called:
            self._timeout_call.cancel()

        if self._seek_call is not None:
            self._seek_call.cancel()
            self._seek_call = None

        self._end_timestamp = time.time()

        metadata = self._req_metadata
        metadata_defer = self._req_defer
        self.debug('error getting metadata %s, error: %s, '
                'elapsed time: %s, timeout %s' % (metadata, error,
                self._end_timestamp - self._start_timestamp,
                self._timeout_call.called))

        self._reset()
        metadata_defer.errback(error)
class GstMetadata(MetadataProvider):
    """Metadata provider that queues requests onto a GstMetadataPipeline,
    processing them one at a time."""

    # filesrc can only read local files
    _supported_schemes = ['file']
    def __init__(self, pipeline=None):
        """*pipeline*: a GstMetadataPipeline; a fresh one is built if None
        (injectable mainly for testing)."""
        super(GstMetadata, self).__init__()
        if pipeline is None:
            pipeline = GstMetadataPipeline()
        self._pipeline = pipeline
        self._uri_cache = {}
        # pending (metadata, deferred) requests; front of the list first
        self._requests = []
        self._running = False
        self._process_next_call = None
        self._next_scheduled = False
        # request currently being processed by the pipeline
        self._current_metadata = None
        self._current_defer = None
709 - def clean(self):
710 if self._process_next_call is not None: 711 self._process_next_call.cancel() 712 self._process_next_call = None 713 self._pipeline.clean()
714
715 - def initialize(self):
716 try: 717 self._pipeline.initialize() 718 except Exception, exc: 719 msg = "Could not initialize the Pipeline: %s" % exc 720 raise InitializeFailure(msg)
721
    def _reset(self):
        """Clear per-request bookkeeping between queue items."""
        # timestamps used for logging purposes
        self._start_timestamp = 0
        self._end_timestamp = 0
        self._next_scheduled = False
    def able_to_handle(self, metadata):
        """Delegate to the module-level able_to_handle() helper."""
        return able_to_handle(self._supported_schemes,
                supported_keys, metadata)
732 - def get_metadata(self, metadata, low_priority=False):
733 dfr = defer.Deferred() 734 if low_priority: 735 self._requests.append((metadata, dfr)) 736 else: 737 self._requests.insert(0, (metadata, dfr)) 738 739 if not self._running: 740 self._running = True 741 self._process_next() 742 743 return dfr
744
    def _process_next(self):
        """Schedule processing of the next queued request.

        A short delay keeps the reactor responsive between requests.
        """
        assert self._process_next_call is None
        assert not self._next_scheduled
        self._next_scheduled = True

        self._process_next_call = \
            reactor.callLater(0.02, self._process_next_real)
    def _process_next_real(self):
        """Pop the next request and hand it to the pipeline; stop the loop
        when the queue is empty."""
        self._process_next_call = None
        assert self._current_metadata is None
        assert self._current_defer is None

        self._next_scheduled = False

        try:
            self._current_metadata, self._current_defer = self._requests.pop(0)
        except IndexError:
            self.debug('metadata queue empty')
            self._running = False
            return

        self.debug('getting metadata %s, queue length %d' %
                (self._current_metadata, len(self._requests)))

        self._start_timestamp = time.time()

        metadata_defer = self._pipeline.get_metadata(self._current_metadata)
        metadata_defer.addCallbacks(self._done, self._failed)
    def _done(self, metadata):
        """Pipeline success callback: relay the result to the request's
        deferred and move on to the next queued request."""
        self._end_timestamp = time.time()

        # detach the current request before scheduling the next one
        metadata, self._current_metadata = self._current_metadata, None
        metadata_defer, self._current_defer = self._current_defer, None

        self._process_next()
        # NOTE: this can't be called from the streaming thread
        self._reset()

        metadata_defer.callback(metadata)
    def _failed(self, failure):
        """Pipeline failure callback: relay the failure to the request's
        deferred and move on to the next queued request."""
        self._end_timestamp = time.time()

        # detach the current request before scheduling the next one
        metadata, self._current_metadata = self._current_metadata, None
        metadata_defer, self._current_defer = self._current_defer, None

        self._process_next()
        self._reset()

        metadata_defer.errback(failure)
class MetadataClientProcessProtocol(ProcessProtocol):
    """Bridge the child process' stdio to a Perspective Broker broker.

    The child's stdout carries the PB protocol; its stderr is passed
    through to our own stderr.
    """

    def __init__(self):
        self.broker = pb.Broker(True, jelly.DummySecurityOptions())

    def makeConnection(self, transport):
        # give the broker the transport first so it is ready before
        # connectionMade fires
        self.broker.makeConnection(transport)
        ProcessProtocol.makeConnection(self, transport)

    def connectionMade(self):
        ProcessProtocol.connectionMade(self)
        self.broker.connectionMade()

    def outReceived(self, data):
        # PB traffic arrives on the child's stdout
        self.broker.dataReceived(data)

    def errReceived(self, data):
        # forward the child's stderr untouched
        sys.stderr.write(data)

    def processEnded(self, reason):
        ProcessProtocol.processEnded(self, reason)
        self.broker.connectionLost(reason)
        # let the factory decide whether to restart the child
        self.factory.processEnded(reason)
class MetadataClientProcessLauncher(Loggable, pb.PBClientFactory):
    """Spawn and supervise the out-of-process metadata server.

    The child runs gst_metadata_runner.py and is restarted up to
    max_retries times if it dies unexpectedly.
    """

    # automatic restarts attempted before giving up
    max_retries = 3
    # script executed in the child process
    server_script = \
        pkg_resources.resource_filename('elisa.plugins.good.gstreamer_plugin',
                'gst_metadata_runner.py')

    log_category = 'gst_metadata_client_process_launcher'
829 - def __init__(self, *args, **kw):
830 Loggable.__init__(self, *args, **kw) 831 pb.PBClientFactory.__init__(self, *args, **kw) 832 Loggable.__init__(self) 833 834 self.protocol = MetadataClientProcessProtocol 835 self.path = os.path.split(sys.modules['elisa'].__path__[0])[0] 836 self.env = dict(os.environ) 837 self.env['PYTHONPATH'] = \ 838 os.pathsep.join([self.env.get('PYTHONPATH', ''), self.path]) 839 self.args = [sys.executable, '-u', self.server_script] 840 self.process = None 841 self.retries = 0 842 self.start_defer = None 843 self.stop_defer = None
844
    def buildProtocol(self, addr):
        """Build the PB protocol, pointing its broker back at this factory
        so processEnded() can be dispatched here."""
        protocol = pb.PBClientFactory.buildProtocol(self, addr)
        protocol.broker.factory = self

        return protocol
    def startProcess(self):
        """Spawn the metadata server and initialize its root component.

        Returns a deferred firing with the remote component once its
        remote initialize() call has completed.
        """
        def get_root_object_done(component):
            self.debug('initializing remote component')
            dfr = component.callRemote('initialize')
            dfr.addCallbacks(initialize_done, initialize_failure,
                    callbackArgs=(component,), errbackArgs=(component,))

            return dfr

        def initialize_done(result, component):
            self.info('metadata server started')
            # clear start_defer before firing so get_component() no longer
            # returns it
            start_defer = self.start_defer
            self.start_defer = None
            res = start_defer.callback(component)

            return res

        def initialize_failure(failure, component):
            self.warning('failed to initialize remote component: %s', failure)
            start_defer = self.start_defer
            self.start_defer = None
            res = start_defer.errback(failure)

            return res

        self.info('starting metadata server')

        assert self.start_defer is None
        # start_defer will be called back after initialize() has been called on
        # the remote object
        self.start_defer = defer.Deferred()

        protocol = self.buildProtocol(None)
        self.process = reactor.spawnProcess(protocol,
                sys.executable, self.args, env=self.env)

        if platform.system() == 'Windows':
            import win32process
            # run the child at idle priority so it doesn't steal cycles
            # from the UI
            win32process.SetPriorityClass(self.process.hProcess,
                    win32process.IDLE_PRIORITY_CLASS)

        self.retries = 0
        self.debug('metadata process started')

        # get the remote object and call initialize() on it
        dfr = self.getRootObject()
        dfr.addCallback(get_root_object_done)

        return self.start_defer
901 - def stopProcess(self):
902 assert self.stop_defer is None 903 904 self.info('stopping metadata server') 905 906 if self.process is None: 907 return dfr.succeed(None) 908 909 def get_component_done(component): 910 self.debug('cleaning remote component') 911 return component.callRemote('clean')
912 913 def clean_done(component): 914 self.debug('clean done') 915 self.process.loseConnection() 916 917 return component 918 919 # stop_defer will be fired in processEnded after we terminate the child 920 # process 921 self.stop_defer = defer.Deferred() 922 923 # get the component and call clean() on it 924 dfr = self.get_component() 925 # FIXME: handle errback 926 dfr.addCallback(get_component_done) 927 dfr.addCallback(clean_done) 928 929 return self.stop_defer 930
    def get_component(self):
        """Return a deferred firing with the remote root component.

        Fails immediately if the process was never started; while the
        process is still starting up, the pending start deferred is
        returned instead.
        """
        if self.process is None:
            return defer.fail(Exception('process not started'))

        if self.start_defer is not None:
            # the remote object is being started
            return self.start_defer
        else:
            return self.getRootObject()

    # FIXME: this should be called by BaseConnector
    #def clientConnectionLost(self, connector, reason, reconnecting):

    # MetadataClientProcessProtocol callback
    def processEnded(self, reason):
        """Called by the protocol when the child exits.

        If a stop was requested, fire stop_defer; otherwise restart the
        child up to max_retries times before failing all pending calls.
        """
        self.process = None

        self.log('process terminated %s', reason.getErrorMessage())

        # if reason.type == error.ProcessDone:
        # FIXME: for some reason sometime we get ProcessDone and sometime we get
        # ProcessTerminated when self.process.loseConnection() is called in
        # stopProcess(). The problem is that ProcessTerminated is also used for
        # segfault so we can't check for that here.
        if self.stop_defer is not None:
            self.info('metadata server stopped')
            stop_defer = self.stop_defer
            self.stop_defer = None
            self._failAll(reason)
            if stop_defer:
                stop_defer.callback(None)
            return

        if self.retries == self.max_retries:
            self.info('%d tries done, giving up' % self.retries)
            # FIXME: this should be called from clientConnectionLost but i don't
            # think that that's called at all with the process api...
            self._failAll(reason)

            return

        self.retries += 1
        self.startProcess()
class GstMetadataClient(MetadataProvider):
    """Metadata provider that proxies requests to the out-of-process
    metadata server via Perspective Broker."""

    # FIXME see able_to_handle
    _supported_schemes = ['file']
    # NOTE(review): _supported_keys is defined here but able_to_handle()
    # below passes the module-level supported_keys instead (which also
    # includes the media_type keys) -- confirm which set is intended.
    _supported_keys = set(['file_type', 'mime_type',
            'artist', 'album', 'song', 'track'])

    def __init__(self):
        super(GstMetadataClient, self).__init__()
        self.launcher = MetadataClientProcessLauncher()

    def initialize(self):
        """Start the child process; the returned deferred fires with self."""
        def start_process_done(component):
            return self

        dfr = self.launcher.startProcess()
        dfr.addCallback(start_process_done)

        return dfr

    def clean(self):
        """Stop the child process, then run the base-class cleanup."""
        def stop_process_done(result):
            return super(GstMetadataClient, self).clean()

        dfr = self.launcher.stopProcess()
        dfr.addCallback(stop_process_done)

        return dfr

    def able_to_handle(self, metadata):
        """Delegate to the module-level able_to_handle() helper."""
        return able_to_handle(self._supported_schemes,
                supported_keys, metadata)

    def get_metadata(self, metadata, low_priority=False):
        """Fill *metadata* by calling get_metadata on the remote component.

        Returns a deferred firing with the same dictionary instance the
        caller passed in, updated with the remote values.
        """
        def get_metadata_done(remote_metadata):
            # we get a normal dictionary as the result of a remote get_metadata
            # call but we need to return the metadata dictionary that was passed
            # as argument
            for key, value in remote_metadata.iteritems():
                if value is None:
                    continue

                metadata[key] = value

            return metadata

        def got_root(root):
            dfr = root.callRemote('get_metadata', metadata, low_priority)
            dfr.addCallback(get_metadata_done)

            return dfr

        root_dfr = self.launcher.get_component()
        root_dfr.addCallback(got_root)

        return root_dfr